From 30cd803978a945e038d9b6270b88154cd1d8eaa5 Mon Sep 17 00:00:00 2001 From: MathMan05 Date: Tue, 23 Dec 2025 16:34:00 -0600 Subject: [PATCH 001/151] feild error fixes --- src/util/util/FieldError.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/util/util/FieldError.ts b/src/util/util/FieldError.ts index 8f93b2b6e..eb91a0ecb 100644 --- a/src/util/util/FieldError.ts +++ b/src/util/util/FieldError.ts @@ -29,19 +29,19 @@ export type ErrorContent = { code: string; message: string }; export type ObjectErrorContent = { _errors: ErrorContent[] }; export function FieldErrors(fields: Record, errors?: ErrorObject[]) { - return new FieldError( - 50035, - "Invalid Form Body", - Object.values(fields).map(({ message, code }) => ({ + const errorObj: ErrorList = {}; + for (const [key, { message, code }] of Object.entries(fields)) { + errorObj[key] = { _errors: [ { message, code: code || "BASE_TYPE_INVALID", }, ], - })), - errors, - ); + }; + } + + return new FieldError(50035, "Invalid Form Body", errorObj, errors); } // TODO: implement Image data type: Data URI scheme that supports JPG, GIF, and PNG formats. An example Data URI format is: data:image/jpeg;base64,BASE64_ENCODED_JPEG_IMAGE_DATA @@ -51,7 +51,7 @@ export class FieldError extends Error { constructor( public code: string | number, public message: string, - public errors?: object, // TODO: I don't like this typing. + public errors?: ErrorList, public _ajvErrors?: ErrorObject[], ) { super(message); From 70cf9ac2dc673cc1183e6033a723d02e74b619dc Mon Sep 17 00:00:00 2001 From: Rory& Date: Tue, 20 Jan 2026 07:16:43 +0100 Subject: [PATCH 002/151] oops, left some .orig files in ajv patch --- patches/ajv+8.17.1.patch | 643 --------------------------------------- 1 file changed, 643 deletions(-) diff --git a/patches/ajv+8.17.1.patch b/patches/ajv+8.17.1.patch index 3bd5dc1d6..9cf6c1cf2 100644 --- a/patches/ajv+8.17.1.patch +++ b/patches/ajv+8.17.1.patch @@ -28,363 +28,6 @@ index 8fc94fd..abeeda9 100644 function parseBooleanToken(bool, fail) { return (cxt) => { const { gen, data } = cxt; -diff --git a/node_modules/ajv/dist/compile/jtd/parse.js.orig b/node_modules/ajv/dist/compile/jtd/parse.js.orig -new file mode 100644 -index 0000000..8fc94fd ---- /dev/null -+++ b/node_modules/ajv/dist/compile/jtd/parse.js.orig -@@ -0,0 +1,350 @@ -+"use strict"; -+Object.defineProperty(exports, "__esModule", { value: true }); -+const types_1 = require("./types"); -+const __1 = require(".."); -+const codegen_1 = require("../codegen"); -+const ref_error_1 = require("../ref_error"); -+const names_1 = require("../names"); -+const code_1 = require("../../vocabularies/code"); -+const ref_1 = require("../../vocabularies/jtd/ref"); -+const type_1 = require("../../vocabularies/jtd/type"); -+const parseJson_1 = require("../../runtime/parseJson"); -+const util_1 = require("../util"); -+const timestamp_1 = require("../../runtime/timestamp"); -+const genParse = { -+ elements: parseElements, -+ values: parseValues, -+ discriminator: parseDiscriminator, -+ properties: parseProperties, -+ optionalProperties: parseProperties, -+ enum: parseEnum, -+ type: parseType, -+ ref: parseRef, -+}; -+function compileParser(sch, definitions) { -+ const _sch = __1.getCompilingSchema.call(this, sch); -+ if (_sch) -+ return _sch; -+ const { es5, lines } = this.opts.code; -+ const { ownProperties } = this.opts; -+ const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties }); -+ const parseName = gen.scopeName("parse"); -+ const cxt = { -+ self: 
this, -+ gen, -+ schema: sch.schema, -+ schemaEnv: sch, -+ definitions, -+ data: names_1.default.data, -+ parseName, -+ char: gen.name("c"), -+ }; -+ let sourceCode; -+ try { -+ this._compilations.add(sch); -+ sch.parseName = parseName; -+ parserFunction(cxt); -+ gen.optimize(this.opts.code.optimize); -+ const parseFuncCode = gen.toString(); -+ sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${parseFuncCode}`; -+ const makeParse = new Function(`${names_1.default.scope}`, sourceCode); -+ const parse = makeParse(this.scope.get()); -+ this.scope.value(parseName, { ref: parse }); -+ sch.parse = parse; -+ } -+ catch (e) { -+ if (sourceCode) -+ this.logger.error("Error compiling parser, function code:", sourceCode); -+ delete sch.parse; -+ delete sch.parseName; -+ throw e; -+ } -+ finally { -+ this._compilations.delete(sch); -+ } -+ return sch; -+} -+exports.default = compileParser; -+const undef = (0, codegen_1._) `undefined`; -+function parserFunction(cxt) { -+ const { gen, parseName, char } = cxt; -+ gen.func(parseName, (0, codegen_1._) `${names_1.default.json}, ${names_1.default.jsonPos}, ${names_1.default.jsonPart}`, false, () => { -+ gen.let(names_1.default.data); -+ gen.let(char); -+ gen.assign((0, codegen_1._) `${parseName}.message`, undef); -+ gen.assign((0, codegen_1._) `${parseName}.position`, undef); -+ gen.assign(names_1.default.jsonPos, (0, codegen_1._) `${names_1.default.jsonPos} || 0`); -+ gen.const(names_1.default.jsonLen, (0, codegen_1._) `${names_1.default.json}.length`); -+ parseCode(cxt); -+ skipWhitespace(cxt); -+ gen.if(names_1.default.jsonPart, () => { -+ gen.assign((0, codegen_1._) `${parseName}.position`, names_1.default.jsonPos); -+ gen.return(names_1.default.data); -+ }); -+ gen.if((0, codegen_1._) `${names_1.default.jsonPos} === ${names_1.default.jsonLen}`, () => gen.return(names_1.default.data)); -+ jsonSyntaxError(cxt); -+ }); -+} -+function parseCode(cxt) { -+ let form; -+ for (const key of types_1.jtdForms) { -+ if (key in cxt.schema) { -+ form = key; -+ break; -+ } -+ } -+ if (form) -+ parseNullable(cxt, genParse[form]); -+ else -+ parseEmpty(cxt); -+} -+const parseBoolean = parseBooleanToken(true, parseBooleanToken(false, jsonSyntaxError)); -+function parseNullable(cxt, parseForm) { -+ const { gen, schema, data } = cxt; -+ if (!schema.nullable) -+ return parseForm(cxt); -+ tryParseToken(cxt, "null", parseForm, () => gen.assign(data, null)); -+} -+function parseElements(cxt) { -+ const { gen, schema, data } = cxt; -+ parseToken(cxt, "["); -+ const ix = gen.let("i", 0); -+ gen.assign(data, (0, codegen_1._) `[]`); -+ parseItems(cxt, "]", () => { -+ const el = gen.let("el"); -+ parseCode({ ...cxt, schema: schema.elements, data: el }); -+ gen.assign((0, codegen_1._) `${data}[${ix}++]`, el); -+ }); -+} -+function parseValues(cxt) { -+ const { gen, schema, data } = cxt; -+ parseToken(cxt, "{"); -+ gen.assign(data, (0, codegen_1._) `{}`); -+ parseItems(cxt, "}", () => parseKeyValue(cxt, schema.values)); -+} -+function parseItems(cxt, endToken, block) { -+ tryParseItems(cxt, endToken, block); -+ parseToken(cxt, endToken); -+} -+function tryParseItems(cxt, endToken, block) { -+ const { gen } = cxt; -+ gen.for((0, codegen_1._) `;${names_1.default.jsonPos}<${names_1.default.jsonLen} && ${jsonSlice(1)}!==${endToken};`, () => { -+ block(); -+ tryParseToken(cxt, ",", () => gen.break(), hasItem); -+ }); -+ function hasItem() { -+ tryParseToken(cxt, endToken, () => { }, jsonSyntaxError); -+ } -+} -+function parseKeyValue(cxt, schema) { -+ const { gen } = cxt; -+ 
const key = gen.let("key"); -+ parseString({ ...cxt, data: key }); -+ parseToken(cxt, ":"); -+ parsePropertyValue(cxt, key, schema); -+} -+function parseDiscriminator(cxt) { -+ const { gen, data, schema } = cxt; -+ const { discriminator, mapping } = schema; -+ parseToken(cxt, "{"); -+ gen.assign(data, (0, codegen_1._) `{}`); -+ const startPos = gen.const("pos", names_1.default.jsonPos); -+ const value = gen.let("value"); -+ const tag = gen.let("tag"); -+ tryParseItems(cxt, "}", () => { -+ const key = gen.let("key"); -+ parseString({ ...cxt, data: key }); -+ parseToken(cxt, ":"); -+ gen.if((0, codegen_1._) `${key} === ${discriminator}`, () => { -+ parseString({ ...cxt, data: tag }); -+ gen.assign((0, codegen_1._) `${data}[${key}]`, tag); -+ gen.break(); -+ }, () => parseEmpty({ ...cxt, data: value }) // can be discarded/skipped -+ ); -+ }); -+ gen.assign(names_1.default.jsonPos, startPos); -+ gen.if((0, codegen_1._) `${tag} === undefined`); -+ parsingError(cxt, (0, codegen_1.str) `discriminator tag not found`); -+ for (const tagValue in mapping) { -+ gen.elseIf((0, codegen_1._) `${tag} === ${tagValue}`); -+ parseSchemaProperties({ ...cxt, schema: mapping[tagValue] }, discriminator); -+ } -+ gen.else(); -+ parsingError(cxt, (0, codegen_1.str) `discriminator value not in schema`); -+ gen.endIf(); -+} -+function parseProperties(cxt) { -+ const { gen, data } = cxt; -+ parseToken(cxt, "{"); -+ gen.assign(data, (0, codegen_1._) `{}`); -+ parseSchemaProperties(cxt); -+} -+function parseSchemaProperties(cxt, discriminator) { -+ const { gen, schema, data } = cxt; -+ const { properties, optionalProperties, additionalProperties } = schema; -+ parseItems(cxt, "}", () => { -+ const key = gen.let("key"); -+ parseString({ ...cxt, data: key }); -+ parseToken(cxt, ":"); -+ gen.if(false); -+ parseDefinedProperty(cxt, key, properties); -+ parseDefinedProperty(cxt, key, optionalProperties); -+ if (discriminator) { -+ gen.elseIf((0, codegen_1._) `${key} === ${discriminator}`); -+ const tag = gen.let("tag"); -+ parseString({ ...cxt, data: tag }); // can be discarded, it is already assigned -+ } -+ gen.else(); -+ if (additionalProperties) { -+ parseEmpty({ ...cxt, data: (0, codegen_1._) `${data}[${key}]` }); -+ } -+ else { -+ parsingError(cxt, (0, codegen_1.str) `property ${key} not allowed`); -+ } -+ gen.endIf(); -+ }); -+ if (properties) { -+ const hasProp = (0, code_1.hasPropFunc)(gen); -+ const allProps = (0, codegen_1.and)(...Object.keys(properties).map((p) => (0, codegen_1._) `${hasProp}.call(${data}, ${p})`)); -+ gen.if((0, codegen_1.not)(allProps), () => parsingError(cxt, (0, codegen_1.str) `missing required properties`)); -+ } -+} -+function parseDefinedProperty(cxt, key, schemas = {}) { -+ const { gen } = cxt; -+ for (const prop in schemas) { -+ gen.elseIf((0, codegen_1._) `${key} === ${prop}`); -+ parsePropertyValue(cxt, key, schemas[prop]); -+ } -+} -+function parsePropertyValue(cxt, key, schema) { -+ parseCode({ ...cxt, schema, data: (0, codegen_1._) `${cxt.data}[${key}]` }); -+} -+function parseType(cxt) { -+ const { gen, schema, data, self } = cxt; -+ switch (schema.type) { -+ case "boolean": -+ parseBoolean(cxt); -+ break; -+ case "string": -+ parseString(cxt); -+ break; -+ case "timestamp": { -+ parseString(cxt); -+ const vts = (0, util_1.useFunc)(gen, timestamp_1.default); -+ const { allowDate, parseDate } = self.opts; -+ const notValid = allowDate ? (0, codegen_1._) `!${vts}(${data}, true)` : (0, codegen_1._) `!${vts}(${data})`; -+ const fail = parseDate -+ ? 
(0, codegen_1.or)(notValid, (0, codegen_1._) `(${data} = new Date(${data}), false)`, (0, codegen_1._) `isNaN(${data}.valueOf())`) -+ : notValid; -+ gen.if(fail, () => parsingError(cxt, (0, codegen_1.str) `invalid timestamp`)); -+ break; -+ } -+ case "float32": -+ case "float64": -+ parseNumber(cxt); -+ break; -+ default: { -+ const t = schema.type; -+ if (!self.opts.int32range && (t === "int32" || t === "uint32")) { -+ parseNumber(cxt, 16); // 2 ** 53 - max safe integer -+ if (t === "uint32") { -+ gen.if((0, codegen_1._) `${data} < 0`, () => parsingError(cxt, (0, codegen_1.str) `integer out of range`)); -+ } -+ } -+ else { -+ const [min, max, maxDigits] = type_1.intRange[t]; -+ parseNumber(cxt, maxDigits); -+ gen.if((0, codegen_1._) `${data} < ${min} || ${data} > ${max}`, () => parsingError(cxt, (0, codegen_1.str) `integer out of range`)); -+ } -+ } -+ } -+} -+function parseString(cxt) { -+ parseToken(cxt, '"'); -+ parseWith(cxt, parseJson_1.parseJsonString); -+} -+function parseEnum(cxt) { -+ const { gen, data, schema } = cxt; -+ const enumSch = schema.enum; -+ parseToken(cxt, '"'); -+ // TODO loopEnum -+ gen.if(false); -+ for (const value of enumSch) { -+ const valueStr = JSON.stringify(value).slice(1); // remove starting quote -+ gen.elseIf((0, codegen_1._) `${jsonSlice(valueStr.length)} === ${valueStr}`); -+ gen.assign(data, (0, codegen_1.str) `${value}`); -+ gen.add(names_1.default.jsonPos, valueStr.length); -+ } -+ gen.else(); -+ jsonSyntaxError(cxt); -+ gen.endIf(); -+} -+function parseNumber(cxt, maxDigits) { -+ const { gen } = cxt; -+ skipWhitespace(cxt); -+ gen.if((0, codegen_1._) `"-0123456789".indexOf(${jsonSlice(1)}) < 0`, () => jsonSyntaxError(cxt), () => parseWith(cxt, parseJson_1.parseJsonNumber, maxDigits)); -+} -+function parseBooleanToken(bool, fail) { -+ return (cxt) => { -+ const { gen, data } = cxt; -+ tryParseToken(cxt, `${bool}`, () => fail(cxt), () => gen.assign(data, bool)); -+ }; -+} -+function parseRef(cxt) { -+ const { gen, self, definitions, schema, schemaEnv } = cxt; -+ const { ref } = schema; -+ const refSchema = definitions[ref]; -+ if (!refSchema) -+ throw new ref_error_1.default(self.opts.uriResolver, "", ref, `No definition ${ref}`); -+ if (!(0, ref_1.hasRef)(refSchema)) -+ return parseCode({ ...cxt, schema: refSchema }); -+ const { root } = schemaEnv; -+ const sch = compileParser.call(self, new __1.SchemaEnv({ schema: refSchema, root }), definitions); -+ partialParse(cxt, getParser(gen, sch), true); -+} -+function getParser(gen, sch) { -+ return sch.parse -+ ? gen.scopeValue("parse", { ref: sch.parse }) -+ : (0, codegen_1._) `${gen.scopeValue("wrapper", { ref: sch })}.parse`; -+} -+function parseEmpty(cxt) { -+ parseWith(cxt, parseJson_1.parseJson); -+} -+function parseWith(cxt, parseFunc, args) { -+ partialParse(cxt, (0, util_1.useFunc)(cxt.gen, parseFunc), args); -+} -+function partialParse(cxt, parseFunc, args) { -+ const { gen, data } = cxt; -+ gen.assign(data, (0, codegen_1._) `${parseFunc}(${names_1.default.json}, ${names_1.default.jsonPos}${args ? 
(0, codegen_1._) `, ${args}` : codegen_1.nil})`); -+ gen.assign(names_1.default.jsonPos, (0, codegen_1._) `${parseFunc}.position`); -+ gen.if((0, codegen_1._) `${data} === undefined`, () => parsingError(cxt, (0, codegen_1._) `${parseFunc}.message`)); -+} -+function parseToken(cxt, tok) { -+ tryParseToken(cxt, tok, jsonSyntaxError); -+} -+function tryParseToken(cxt, tok, fail, success) { -+ const { gen } = cxt; -+ const n = tok.length; -+ skipWhitespace(cxt); -+ gen.if((0, codegen_1._) `${jsonSlice(n)} === ${tok}`, () => { -+ gen.add(names_1.default.jsonPos, n); -+ success === null || success === void 0 ? void 0 : success(cxt); -+ }, () => fail(cxt)); -+} -+function skipWhitespace({ gen, char: c }) { -+ gen.code((0, codegen_1._) `while((${c}=${names_1.default.json}[${names_1.default.jsonPos}],${c}===" "||${c}==="\\n"||${c}==="\\r"||${c}==="\\t"))${names_1.default.jsonPos}++;`); -+} -+function jsonSlice(len) { -+ return len === 1 -+ ? (0, codegen_1._) `${names_1.default.json}[${names_1.default.jsonPos}]` -+ : (0, codegen_1._) `${names_1.default.json}.slice(${names_1.default.jsonPos}, ${names_1.default.jsonPos}+${len})`; -+} -+function jsonSyntaxError(cxt) { -+ parsingError(cxt, (0, codegen_1._) `"unexpected token " + ${names_1.default.json}[${names_1.default.jsonPos}]`); -+} -+function parsingError({ gen, parseName }, msg) { -+ gen.assign((0, codegen_1._) `${parseName}.message`, msg); -+ gen.assign((0, codegen_1._) `${parseName}.position`, names_1.default.jsonPos); -+ gen.return(undef); -+} -+//# sourceMappingURL=parse.js.map -\ No newline at end of file diff --git a/node_modules/ajv/dist/compile/rules.js b/node_modules/ajv/dist/compile/rules.js index 82a591f..1ebd8fe 100644 --- a/node_modules/ajv/dist/compile/rules.js @@ -450,216 +93,6 @@ index 6d03e0d..a35a428 100644 default: return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`; } -diff --git a/node_modules/ajv/dist/compile/validate/dataType.js.orig b/node_modules/ajv/dist/compile/validate/dataType.js.orig -new file mode 100644 -index 0000000..6d03e0d ---- /dev/null -+++ b/node_modules/ajv/dist/compile/validate/dataType.js.orig -@@ -0,0 +1,203 @@ -+"use strict"; -+Object.defineProperty(exports, "__esModule", { value: true }); -+exports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0; -+const rules_1 = require("../rules"); -+const applicability_1 = require("./applicability"); -+const errors_1 = require("../errors"); -+const codegen_1 = require("../codegen"); -+const util_1 = require("../util"); -+var DataType; -+(function (DataType) { -+ DataType[DataType["Correct"] = 0] = "Correct"; -+ DataType[DataType["Wrong"] = 1] = "Wrong"; -+})(DataType || (exports.DataType = DataType = {})); -+function getSchemaTypes(schema) { -+ const types = getJSONTypes(schema.type); -+ const hasNull = types.includes("null"); -+ if (hasNull) { -+ if (schema.nullable === false) -+ throw new Error("type: null contradicts nullable: false"); -+ } -+ else { -+ if (!types.length && schema.nullable !== undefined) { -+ throw new Error('"nullable" cannot be used without "type"'); -+ } -+ if (schema.nullable === true) -+ types.push("null"); -+ } -+ return types; -+} -+exports.getSchemaTypes = getSchemaTypes; -+// eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents -+function getJSONTypes(ts) { -+ const types = Array.isArray(ts) ? ts : ts ? 
[ts] : []; -+ if (types.every(rules_1.isJSONType)) -+ return types; -+ throw new Error("type must be JSONType or JSONType[]: " + types.join(",")); -+} -+exports.getJSONTypes = getJSONTypes; -+function coerceAndCheckDataType(it, types) { -+ const { gen, data, opts } = it; -+ const coerceTo = coerceToTypes(types, opts.coerceTypes); -+ const checkTypes = types.length > 0 && -+ !(coerceTo.length === 0 && types.length === 1 && (0, applicability_1.schemaHasRulesForType)(it, types[0])); -+ if (checkTypes) { -+ const wrongType = checkDataTypes(types, data, opts.strictNumbers, DataType.Wrong); -+ gen.if(wrongType, () => { -+ if (coerceTo.length) -+ coerceData(it, types, coerceTo); -+ else -+ reportTypeError(it); -+ }); -+ } -+ return checkTypes; -+} -+exports.coerceAndCheckDataType = coerceAndCheckDataType; -+const COERCIBLE = new Set(["string", "number", "integer", "boolean", "null"]); -+function coerceToTypes(types, coerceTypes) { -+ return coerceTypes -+ ? types.filter((t) => COERCIBLE.has(t) || (coerceTypes === "array" && t === "array")) -+ : []; -+} -+function coerceData(it, types, coerceTo) { -+ const { gen, data, opts } = it; -+ const dataType = gen.let("dataType", (0, codegen_1._) `typeof ${data}`); -+ const coerced = gen.let("coerced", (0, codegen_1._) `undefined`); -+ if (opts.coerceTypes === "array") { -+ gen.if((0, codegen_1._) `${dataType} == 'object' && Array.isArray(${data}) && ${data}.length == 1`, () => gen -+ .assign(data, (0, codegen_1._) `${data}[0]`) -+ .assign(dataType, (0, codegen_1._) `typeof ${data}`) -+ .if(checkDataTypes(types, data, opts.strictNumbers), () => gen.assign(coerced, data))); -+ } -+ gen.if((0, codegen_1._) `${coerced} !== undefined`); -+ for (const t of coerceTo) { -+ if (COERCIBLE.has(t) || (t === "array" && opts.coerceTypes === "array")) { -+ coerceSpecificType(t); -+ } -+ } -+ gen.else(); -+ reportTypeError(it); -+ gen.endIf(); -+ gen.if((0, codegen_1._) `${coerced} !== undefined`, () => { -+ gen.assign(data, coerced); -+ assignParentData(it, coerced); -+ }); -+ function coerceSpecificType(t) { -+ switch (t) { -+ case "string": -+ gen -+ .elseIf((0, codegen_1._) `${dataType} == "number" || ${dataType} == "boolean"`) -+ .assign(coerced, (0, codegen_1._) `"" + ${data}`) -+ .elseIf((0, codegen_1._) `${data} === null`) -+ .assign(coerced, (0, codegen_1._) `""`); -+ return; -+ case "number": -+ gen -+ .elseIf((0, codegen_1._) `${dataType} == "boolean" || ${data} === null -+ || (${dataType} == "string" && ${data} && ${data} == +${data})`) -+ .assign(coerced, (0, codegen_1._) `+${data}`); -+ return; -+ case "integer": -+ gen -+ .elseIf((0, codegen_1._) `${dataType} === "boolean" || ${data} === null -+ || (${dataType} === "string" && ${data} && ${data} == +${data} && !(${data} % 1))`) -+ .assign(coerced, (0, codegen_1._) `+${data}`); -+ return; -+ case "boolean": -+ gen -+ .elseIf((0, codegen_1._) `${data} === "false" || ${data} === 0 || ${data} === null`) -+ .assign(coerced, false) -+ .elseIf((0, codegen_1._) `${data} === "true" || ${data} === 1`) -+ .assign(coerced, true); -+ return; -+ case "null": -+ gen.elseIf((0, codegen_1._) `${data} === "" || ${data} === 0 || ${data} === false`); -+ gen.assign(coerced, null); -+ return; -+ case "array": -+ gen -+ .elseIf((0, codegen_1._) `${dataType} === "string" || ${dataType} === "number" -+ || ${dataType} === "boolean" || ${data} === null`) -+ .assign(coerced, (0, codegen_1._) `[${data}]`); -+ } -+ } -+} -+function assignParentData({ gen, parentData, parentDataProperty }, expr) { -+ // TODO use gen.property -+ 
gen.if((0, codegen_1._) `${parentData} !== undefined`, () => gen.assign((0, codegen_1._) `${parentData}[${parentDataProperty}]`, expr)); -+} -+function checkDataType(dataType, data, strictNums, correct = DataType.Correct) { -+ const EQ = correct === DataType.Correct ? codegen_1.operators.EQ : codegen_1.operators.NEQ; -+ let cond; -+ switch (dataType) { -+ case "null": -+ return (0, codegen_1._) `${data} ${EQ} null`; -+ case "array": -+ cond = (0, codegen_1._) `Array.isArray(${data})`; -+ break; -+ case "object": -+ cond = (0, codegen_1._) `${data} && typeof ${data} == "object" && !Array.isArray(${data})`; -+ break; -+ case "integer": -+ cond = numCond((0, codegen_1._) `!(${data} % 1) && !isNaN(${data})`); -+ break; -+ case "number": -+ cond = numCond(); -+ break; -+ default: -+ return (0, codegen_1._) `typeof ${data} ${EQ} ${dataType}`; -+ } -+ return correct === DataType.Correct ? cond : (0, codegen_1.not)(cond); -+ function numCond(_cond = codegen_1.nil) { -+ return (0, codegen_1.and)((0, codegen_1._) `typeof ${data} == "number"`, _cond, strictNums ? (0, codegen_1._) `isFinite(${data})` : codegen_1.nil); -+ } -+} -+exports.checkDataType = checkDataType; -+function checkDataTypes(dataTypes, data, strictNums, correct) { -+ if (dataTypes.length === 1) { -+ return checkDataType(dataTypes[0], data, strictNums, correct); -+ } -+ let cond; -+ const types = (0, util_1.toHash)(dataTypes); -+ if (types.array && types.object) { -+ const notObj = (0, codegen_1._) `typeof ${data} != "object"`; -+ cond = types.null ? notObj : (0, codegen_1._) `!${data} || ${notObj}`; -+ delete types.null; -+ delete types.array; -+ delete types.object; -+ } -+ else { -+ cond = codegen_1.nil; -+ } -+ if (types.number) -+ delete types.integer; -+ for (const t in types) -+ cond = (0, codegen_1.and)(cond, checkDataType(t, data, strictNums, correct)); -+ return cond; -+} -+exports.checkDataTypes = checkDataTypes; -+const typeError = { -+ message: ({ schema }) => `must be ${schema}`, -+ params: ({ schema, schemaValue }) => typeof schema == "string" ? 
(0, codegen_1._) `{type: ${schema}}` : (0, codegen_1._) `{type: ${schemaValue}}`, -+}; -+function reportTypeError(it) { -+ const cxt = getTypeErrorContext(it); -+ (0, errors_1.reportError)(cxt, typeError); -+} -+exports.reportTypeError = reportTypeError; -+function getTypeErrorContext(it) { -+ const { gen, data, schema } = it; -+ const schemaCode = (0, util_1.schemaRefOrVal)(it, schema, "type"); -+ return { -+ gen, -+ keyword: "type", -+ data, -+ schema: schema.type, -+ schemaCode, -+ schemaValue: schemaCode, -+ parentSchema: schema, -+ params: {}, -+ it, -+ }; -+} -+//# sourceMappingURL=dataType.js.map -\ No newline at end of file diff --git a/node_modules/ajv/dist/refs/json-schema-2019-09/meta/validation.json b/node_modules/ajv/dist/refs/json-schema-2019-09/meta/validation.json index 7027a12..25679c8 100644 --- a/node_modules/ajv/dist/refs/json-schema-2019-09/meta/validation.json @@ -814,79 +247,3 @@ index 17a0b51..bc54aad 100644 case "float32": case "float64": cond = (0, codegen_1._) `typeof ${data} == "number"`; -diff --git a/node_modules/ajv/dist/vocabularies/jtd/type.js.orig b/node_modules/ajv/dist/vocabularies/jtd/type.js.orig -new file mode 100644 -index 0000000..17a0b51 ---- /dev/null -+++ b/node_modules/ajv/dist/vocabularies/jtd/type.js.orig -@@ -0,0 +1,69 @@ -+"use strict"; -+Object.defineProperty(exports, "__esModule", { value: true }); -+exports.intRange = void 0; -+const codegen_1 = require("../../compile/codegen"); -+const timestamp_1 = require("../../runtime/timestamp"); -+const util_1 = require("../../compile/util"); -+const metadata_1 = require("./metadata"); -+const error_1 = require("./error"); -+exports.intRange = { -+ int8: [-128, 127, 3], -+ uint8: [0, 255, 3], -+ int16: [-32768, 32767, 5], -+ uint16: [0, 65535, 5], -+ int32: [-2147483648, 2147483647, 10], -+ uint32: [0, 4294967295, 10], -+}; -+const error = { -+ message: (cxt) => (0, error_1.typeErrorMessage)(cxt, cxt.schema), -+ params: (cxt) => (0, error_1.typeErrorParams)(cxt, cxt.schema), -+}; -+function timestampCode(cxt) { -+ const { gen, data, it } = cxt; -+ const { timestamp, allowDate } = it.opts; -+ if (timestamp === "date") -+ return (0, codegen_1._) `${data} instanceof Date `; -+ const vts = (0, util_1.useFunc)(gen, timestamp_1.default); -+ const allowDateArg = allowDate ? (0, codegen_1._) `, true` : codegen_1.nil; -+ const validString = (0, codegen_1._) `typeof ${data} == "string" && ${vts}(${data}${allowDateArg})`; -+ return timestamp === "string" ? validString : (0, codegen_1.or)((0, codegen_1._) `${data} instanceof Date`, validString); -+} -+const def = { -+ keyword: "type", -+ schemaType: "string", -+ error, -+ code(cxt) { -+ (0, metadata_1.checkMetadata)(cxt); -+ const { data, schema, parentSchema, it } = cxt; -+ let cond; -+ switch (schema) { -+ case "boolean": -+ case "string": -+ cond = (0, codegen_1._) `typeof ${data} == ${schema}`; -+ break; -+ case "timestamp": { -+ cond = timestampCode(cxt); -+ break; -+ } -+ case "float32": -+ case "float64": -+ cond = (0, codegen_1._) `typeof ${data} == "number"`; -+ break; -+ default: { -+ const sch = schema; -+ cond = (0, codegen_1._) `typeof ${data} == "number" && isFinite(${data}) && !(${data} % 1)`; -+ if (!it.opts.int32range && (sch === "int32" || sch === "uint32")) { -+ if (sch === "uint32") -+ cond = (0, codegen_1._) `${cond} && ${data} >= 0`; -+ } -+ else { -+ const [min, max] = exports.intRange[sch]; -+ cond = (0, codegen_1._) `${cond} && ${data} >= ${min} && ${data} <= ${max}`; -+ } -+ } -+ } -+ cxt.pass(parentSchema.nullable ? 
(0, codegen_1.or)((0, codegen_1._) `${data} === null`, cond) : cond); -+ }, -+}; -+exports.default = def; -+//# sourceMappingURL=type.js.map -\ No newline at end of file From 7b48c2bdd15f4ca6fddbde2a09c622810841f7d3 Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 07:08:07 +0100 Subject: [PATCH 003/151] Fix tsconfig for tsgo --- tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tsconfig.json b/tsconfig.json index fa433514e..b320cf88f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -6,7 +6,7 @@ /* Projects */ "incremental": true /* Save .tsbuildinfo files to allow for incremental compilation of projects. */, // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - "tsBuildInfoFile": "./dist/.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + "tsBuildInfoFile": "./dist/src.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ From 00b2a18cdb1ce6b75f096d263e6b12d355158170 Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 07:09:14 +0100 Subject: [PATCH 004/151] Quick and dirty util scripts, trim node modules --- default.nix | 3 +- nix/trimNodeModules.sh | 251 ++++++++++++++++++++++++----------------- scripts/countDirs.cs | 123 ++++++++++++++++++++ scripts/countExts.cs | 91 +++++++++++++++ 4 files changed, 362 insertions(+), 106 deletions(-) create mode 100755 scripts/countDirs.cs create mode 100755 scripts/countExts.cs diff --git a/default.nix b/default.nix index 8f4280d80..af333012d 100644 --- a/default.nix +++ b/default.nix @@ -63,7 +63,8 @@ pkgs.buildNpmPackage { # remove packages not needed for production, or at least try to... npm prune --omit dev --no-save $npmInstallFlags "''${npmInstallFlagsArray[@]}" $npmFlags "''${npmFlagsArray[@]}" - ${./nix/trimNodeModules.sh} + rm -v dist/src.tsbuildinfo + time ${./nix/trimNodeModules.sh} # Copy outputs echo "Installing package into $out" diff --git a/nix/trimNodeModules.sh b/nix/trimNodeModules.sh index db057b117..c33e49597 100755 --- a/nix/trimNodeModules.sh +++ b/nix/trimNodeModules.sh @@ -1,123 +1,164 @@ # shellcheck shell=bash # We don't care about CLI scripts: +echo "Removing CLI scripts..." for f in ./node_modules/.bin/*; do rm -f "$(realpath $f)" $f done +find ./node_modules -type d -name '.bin' | while read -r dir; do + echo "Removing CLI scripts from $dir..." + for f in "$dir"/*; do + rm -f "$(realpath $f)" $f + done +done + +# Unused stuff in specific packages +time ( + echo -n "Removing unused module features: " + rm -rf ./node_modules/typeorm/browser && echo -n . + rm -rf ./node_modules/typeorm/cli && echo -n . + rm -rf ./node_modules/typeorm/driver/**/*.map && echo -n . + rm -rf ./node_modules/ajv/lib && echo -n . +# rm -rf ./node_modules/ajv/dist/refs && echo -n . +) + +time ( + echo -en "\nRemoving specific known large unneeded packages: " + rm -rf ./node_modules/typescript && echo -n . + rm -rf ./node_modules/@typescript/native-preview && echo -n . + rm -rf ./node_modules/ts-node && echo -n . + rm -rf ./node_modules/node-gyp && echo -n . 
+ rm -rf ./node_modules/node-gyp-build-optional-packages && echo -n . + rm -rf ./node_modules/discord-protos/{discord_protos,scripts} && echo -n . + rm -f ./node_modules/.package-lock.json && echo -n . + rm -rf ./node_modules/@jimp/plugin-print/fonts && echo -n . # duplicated in dist/fonts + for i in ./node_modules/@jimp/*; do + if [ -d "$i/dist/commonjs" ]; then + rm -rf "$i/dist/commonjs" && echo -n . + fi + if [ -d "$i/dist/browser" ]; then + rm -rf "$i/dist/browser" && echo -n . + fi + done + + find ./node_modules -name '*.map' -type f -delete -printf . +) + # sources -echo "Removing source files..." -find ./node_modules -name '*.ts' -type f -delete -find ./node_modules -name '*.mts' -type f -delete -find ./node_modules -name '*.cts' -type f -delete -find ./node_modules -name '*.coffee' -type f -delete -find ./node_modules -name '*.rs' -type f -delete -find ./node_modules -name '*.c' -type f -delete -find ./node_modules -name '*.cc' -type f -delete -find ./node_modules -name '*.cpp' -type f -delete -find ./node_modules -name '*.h' -type f -delete -find ./node_modules -name '*.hh' -type f -delete -find ./node_modules -name '*.hpp' -type f -delete +time ( + echo -en "\nRemoving source files: " + # typescript + echo -en "\n - Typescript: " + find ./node_modules -regextype posix-extended -iregex '.*\.(ts|mts|cts|d\.ts)$' -type f -delete -printf . + # C/C++ + echo -en "\n - C/C++: " + find ./node_modules -regextype posix-extended -iregex '.*\.(c|cc|cpp|h|hh|hpp)$' -type f -delete -printf . + # rust + echo -en "\n - Rust: " + find ./node_modules -name '*.rs' -type f -delete -printf . + # coffeescript + echo -en "\n - Coffeescript: " + find ./node_modules -name '*.coffee' -type f -delete -printf . +) # standard C/C++ build artifacts -echo "Removing build artifacts..." -find ./node_modules -name '*.o' -type f -delete -find ./node_modules -name '*.a' -type f -delete -find ./node_modules -name '*.d' -type f -delete +time ( + echo -en "\nRemoving build artifacts: " + find ./node_modules -regextype posix-extended -iregex '.*\.(o|a|d|obj)$' -type f -delete -printf . +) # ??? -echo "Removing other random build artifacts..." -# find ./node_modules -wholename '*build/Release/obj' -type d -exec rm -rfv {} + -# find ./node_modules -wholename '*build/Release/obj.target' -type d -exec rm -rfv {} + -find ./node_modules -name 'obj' -type f -delete -find ./node_modules -name 'obj.target' -type f -delete -find ./node_modules -name '*.ar-file-list' -type f -delete -find ./node_modules -name '*.stamp' -type f -delete -find ./node_modules -name '*musl.node' -type f -delete -rm -rf ./node_modules/typescript -rm -rf ./node_modules/@typescript/native-preview -rm -rf ./node_modules/ts-node -rm -rf ./node_modules/node-gyp -rm -rf ./node_modules/node-gyp-build-optional-packages -rm -rf ./node_modules/discord-protos/{discord_protos,scripts} +time ( + echo -en "\nRemoving other random build artifacts: " + # find ./node_modules -wholename '*build/Release/obj' -type d -exec rm -rfv {} + + # find ./node_modules -wholename '*build/Release/obj.target' -type d -exec rm -rfv {} + + find ./node_modules -iname 'obj' -type f -delete -printf . + find ./node_modules -iname 'obj.target' -type f -delete -printf . + find ./node_modules -iname '*.ar-file-list' -type f -delete -printf . + find ./node_modules -iname '*.stamp' -type f -delete -printf . + find ./node_modules -iname '*musl.node' -type f -delete -printf . 
-# rm -rf ./node_modules/typescript-json-schema -rm -rf ./node_modules/node-gyp -find ./node_modules -name '@types' -type d -exec rm -rf {} + + # rm -rf ./node_modules/typescript-json-schema + rm -rf ./node_modules/node-gyp && echo -n . + find ./node_modules -iname '@types' -type d -exec rm -rf {} + -printf . +) -echo "Removing random common files..." -find ./node_modules -name 'test' -type d -exec rm -rf {} + -find ./node_modules -name 'tests' -type d -exec rm -rf {} + -find ./node_modules -name 'examples' -type d -exec rm -rf {} + -find ./node_modules -name 'coverage' -type d -exec rm -rf {} + # Why would you ship coverage reports in artifacts?? -find ./node_modules -name '.nyc_output' -type d -exec rm -rf {} + -find ./node_modules -name 'doc' -type d -exec rm -rf {} + -find ./node_modules -name 'docs' -type d -exec rm -rf {} + -find ./node_modules -name '.idea' -type d -exec rm -rf {} + -find ./node_modules -name '.github' -type d -exec rm -rf {} + -find ./node_modules -name '__snapshots__' -type d -exec rm -rf {} + -find ./node_modules -name '.turbo' -type d -exec rm -rf {} + -find ./node_modules -name '.tshy' -type d -exec rm -rf {} + -find ./node_modules -name '__image_snapshots__' -type d -exec rm -rf {} + -find ./node_modules -name '*.md' -type f -delete -find ./node_modules -name '*.markdown' -type f -delete -find ./node_modules -name '*~' -type f -delete # Someone forgot some editor buffers, lol -find ./node_modules -name 'requirements.txt' -type f -delete -find ./node_modules -name 'pyproject.toml' -type f -delete -find ./node_modules -name '*.py' -type f -delete -find ./node_modules -name '*.sh' -type f -delete -find ./node_modules -name '*.bat' -type f -delete -find ./node_modules -name '*.cmd' -type f -delete -find ./node_modules -name '*.in' -type f -delete -find ./node_modules -name '*.mk' -type f -delete -find ./node_modules -name '*.txt' -type f -delete -find ./node_modules -name '*.Makefile' -type f -delete -find ./node_modules -name '*.d.ts' -type f -delete -find ./node_modules -name '*.log' -type f -delete -find ./node_modules -name '*.tar.gz' -type f -delete -find ./node_modules -name '*.cs' -type f -delete -find ./node_modules -name '*.rc' -type f -delete -find ./node_modules -name '*.am' -type f -delete -find ./node_modules -name '*.fallback' -type f -delete -find ./node_modules -name '*.msc' -type f -delete -find ./node_modules -name '*.1' -type f -delete -find ./node_modules -name '*.m4' -type f -delete -find ./node_modules -name '*.vc' -type f -delete -find ./node_modules -name 'Makefile' -type f -delete -find ./node_modules -name 'Dockerfile*' -type f -delete -find ./node_modules -name 'tsconfig.*' -type f -delete -find ./node_modules -name '.travis.yml' -type f -delete -find ./node_modules -name '.prettier*' -type f -delete -find ./node_modules -name '.airtap.yml' -type f -delete -find ./node_modules -name '.eslintrc' -type f -delete -find ./node_modules -name '.eslintrc.yml' -type f -delete -find ./node_modules -name '.gitattributes' -type f -delete -find ./node_modules -name '.npmignore' -type f -delete -find ./node_modules -name '.nycrc' -type f -delete -find ./node_modules -name '.editorconfig' -type f -delete -find ./node_modules -name '.eslintignore' -type f -delete -find ./node_modules -name 'yarn.lock' -type f -delete -find ./node_modules -name 'CODEOWNERS' -type f -delete -find ./node_modules -name 'README' -type f -delete -find ./node_modules -name 'changelog' -type f -delete -find ./node_modules -name '*.stflow' -type f -delete -find ./node_modules 
-name '.docker*' -type f -delete -find ./node_modules -name 'deno.lock' -type f -delete -find ./node_modules -name 'configure' -type f -delete +time ( + echo "Removing random common files..." + echo -en "\n - other: " + find ./node_modules -regextype posix-extended -iregex '.*(\.(github|idea|devcontainer)|tests?|docs?|examples?)$' -type d -exec rm -rf {} + -printf . + find ./node_modules -regextype posix-extended -iregex '.*(__image_snapshots__|__snapshots__|__tests__|__fixtures__)$' -type d -exec rm -rf {} + -printf . + find ./node_modules -name '.tshy' -type d -exec rm -rf {} + -printf . + echo -en "\n - scripts: " + find ./node_modules -regextype posix-extended -iregex '.*\.(sh|cmd|bat|makefile|mk)$' -type f -delete -printf . + find ./node_modules -iname 'makefile' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.(in|py)$' -type f -delete -printf . + echo -en "\n - package locks: " + find ./node_modules -regextype posix-extended -iregex '.*(yarn\.lock|deno-lock\.json|deno\.lock|deno\.jsonc)$' -type f -delete -printf . + echo -en "\n - ignore files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(docker|git|npm|eslint|prettier)ignore$' -type f -delete -printf . + echo -en "\n - git metadata: " + find ./node_modules -regextype posix-extended -iregex '.*\.git(keep|attributes|modules)$' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*(codeowners|changelog)$' -type f -delete -printf . + echo -en "\n - README files: " + find ./node_modules -regextype posix-extended -iregex '.*readme(\.md|\.txt)?$' -type f -delete -printf . + find ./node_modules -iname 'readme' -type f -delete -printf . + echo -en "\n - RC files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(babel|eslint|prettier|npm|nvm|swc|stylelint|mocha|jshint|nyc|yarn|ncurc)rc(\.(yml|json|(m|c)?js))?$' -type f -delete -printf . + echo -en "\n - image files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(png|jpg|jpeg|gif|svg|ico|webp|bmp|tiff)$' -type f -delete -printf . + echo -en "\n - text files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(txt|rst|log|md|markdown|hbs|bnf)$' -type f -delete -printf . + echo -en "\n - IDE/editor config/buffer files: " + find ./node_modules -regextype posix-extended -iregex '.*(\.(swp|swo|eslintcache)|~)$' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.(vscode|editorconfig|pre-commit-config\.yaml)$' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.(vscode|idea)$' -type d -exec rm -rf {} + -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.jsdoc-conf\.json$' -type d -exec rm -rf {} + -printf . + find ./node_modules -iname '*.iml' -type f -delete -printf . + echo -en "\n - CI configuration files: " + find ./node_modules -regextype posix-extended -iregex '.*(travis|circleci|github|gitlab|airtap|appveyor|wercker|codeship|drone|semaphoreci|buildkite).*\.(yml|yaml)$' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.runkit_example\.js$' -type d -exec rm -rf {} + -printf . + find ./node_modules -regextype posix-extended -iregex '.*dockerfile.*' -type f -delete -printf . + echo -en "\n - TypeScript meta files: " + find ./node_modules -iname '*.tsbuildinfo' -type f -delete -printf . + find ./node_modules -iname 'tsconfig.json' -type f -delete -printf . 
+ echo -en "\n - YAML/TOML/HTML/CSS files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(html|yml|yaml|toml|css)$' -type f -delete -printf . + echo -en "\n - Non-normalised JS files (ie. umd/amd/...): " + find ./node_modules -regextype posix-extended -iregex '.*\.(amd|umd|browser|web)\.(cjs|mjs|js)$' -type f -delete -printf . + echo -en "\n - Test/spec JS files: " + find ./node_modules -regextype posix-extended -iregex '.*\.(test|spec)\.(m|c)?js$' -type f -delete -printf . + find ./node_modules -regextype posix-extended -iregex '.*\.(conf|config)\.(m|c)?js$' -type f -delete -printf . +) -if false; then +if true; then # Probably dont do this, lol, only saves ~1M anyways # Purely for statistical purposes - find ./node_modules -name 'LICENSE' -type f -delete - find ./node_modules -name 'License' -type f -delete - find ./node_modules -name 'license' -type f -delete - find ./node_modules -name 'license.terms' -type f -delete - find ./node_modules -name 'LICENSE.txt' -type f -delete - find ./node_modules -name 'LICENSE.BSD' -type f -delete - find ./node_modules -name 'LICENSE.MIT' -type f -delete - find ./node_modules -name 'LICENSE.APACHE2' -type f -delete - find ./node_modules -name 'LICENSE-MIT.txt' -type f -delete + echo -en "\nRemoving license files: " + find ./node_modules -iname 'LICENSE' -type f -delete -printf . + find ./node_modules -iname 'license.terms' -type f -delete -printf . + find ./node_modules -iname 'LICENSE.txt' -type f -delete -printf . + find ./node_modules -iname 'LICENSE.BSD' -type f -delete -printf . + find ./node_modules -iname 'LICENSE.MIT' -type f -delete -printf . + find ./node_modules -iname 'LICENSE.APACHE2' -type f -delete -printf . + find ./node_modules -iname 'LICENSE-MIT.txt' -type f -delete -printf . + find ./node_modules -iname 'LICENSE-MIT' -type f -delete -printf . + find ./node_modules -iname '*LICENSE*' -type f -delete -printf . + find ./node_modules -iname '*LICENCE*' -type f -delete -printf . fi -echo "Removing empty directories..." -find node_modules -maxdepth 1 -type d -empty -delete -echo "Trimming complete." \ No newline at end of file +time ( + echo -en "\nReplacing all dotenv transitive dependencies with own version: " + find ./node_modules -regextype posix-extended -iregex '\./node_modules/.*/node_modules/dotenv' -type d | while read -r dir; do + echo -n . + echo "Replacing $dir" + rm -rf "$dir" + relpath=$(realpath --relative-to="$(dirname "$dir")" ./node_modules/dotenv) + ln -s "$relpath" "$dir" + done +) + +echo -en "\nRemoving empty directories: " +find ./node_modules -maxdepth 10 -type d -empty -delete -printf . +echo -e "\nTrimming complete. Stats:" +du -sh ./{,.}* . 
2>/dev/null | sort -h diff --git a/scripts/countDirs.cs b/scripts/countDirs.cs new file mode 100755 index 000000000..a8e667bcc --- /dev/null +++ b/scripts/countDirs.cs @@ -0,0 +1,123 @@ +#!/usr/bin/env dotnet +#:property Nullable=enable +#:property PublishAOT=false +#:package ArcaneLibs@1.0.0-preview.20251207* + +using ArcaneLibs; +using System; +using System.IO; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; + +if(args.Length == 0) +{ + Console.WriteLine("Usage: countExts [options] "); + Console.WriteLine("Options:"); + Console.WriteLine(" --size Sort by total size per extension (descending)"); + Console.WriteLine(" --real-size Sort by estimated disk usage per extension (descending)"); + Console.WriteLine(" --count Sort by file count per extension (ascending)"); + Console.WriteLine(" --name Sort by extension name (ascending)"); + Console.WriteLine(" --reverse Reverse the sort order"); + return; +} + +// undo chdir by dotnet +Environment.CurrentDirectory = Environment.GetEnvironmentVariable("PWD") ?? Environment.CurrentDirectory; + +Dictionary dirSizes = []; +Dictionary dirRealSizes = []; +Dictionary dirCounts = []; +long totalSize = 0; +long totalRealSize = 0; + +void GetDirSize(string path) +{ + long dirSize = 0; + long realDirSize = 0; + foreach (var file in Directory.GetFiles(path)) { + var len = new FileInfo(file).Length; + dirSize += len; + // assuming 4KiB block size + realDirSize += ((len + 4095) / 4096) * 4096; + } + + // include filesystem entry sizes in real size + Glibc.stat(path, out var statbuf); + realDirSize += statbuf.st_size; + + totalSize += dirSize; + totalRealSize += realDirSize; + var dirName = Path.GetFileName(path).ToLower(); + + if (!dirCounts.ContainsKey(dirName)) + dirCounts[dirName] = 0; + dirCounts[dirName]++; + + if (!dirSizes.ContainsKey(dirName)) + dirSizes[dirName] = 0; + dirSizes[dirName] += dirSize; + + if (!dirRealSizes.ContainsKey(dirName)) + dirRealSizes[dirName] = 0; + dirRealSizes[dirName] += realDirSize; + + foreach (var dir in Directory.GetDirectories(path)) GetDirSize(dir); +} + +GetDirSize(args.First(a=>!a.StartsWith("--"))); +var extColWidth = dirCounts.Max(k => k.Key.Length) + 1; +var numColWidth = dirCounts.Max(k=>k.Value.ToString().Length) + 1; + +IEnumerable> sortedDirs = dirCounts.OrderByDescending(kvp => kvp.Value); + +if (args.Contains("--size")) + sortedDirs = dirCounts.OrderByDescending(kvp => dirSizes[kvp.Key]); +if (args.Contains("--real-size")) + sortedDirs = dirCounts.OrderByDescending(kvp => dirRealSizes[kvp.Key]); +if (args.Contains("--ext")) + sortedDirs = dirCounts.OrderBy(kvp => kvp.Key); +if (args.Contains("--count")) + sortedDirs = dirCounts.OrderBy(kvp => kvp.Value); + +if (args.Contains("--reverse")) + sortedDirs = sortedDirs.Reverse(); + +if (args.Contains("--duplicate-only")) + sortedDirs = sortedDirs.Where(kvp => kvp.Value > 1); + +foreach (var kvp in sortedDirs) Console.WriteLine($"{kvp.Value.ToString().PadLeft(numColWidth)} {kvp.Key.PadRight(extColWidth)} Total Size: {Util.BytesToString(dirSizes[kvp.Key]).PadRight(12)} Est. usage: {Util.BytesToString(dirRealSizes[kvp.Key])}"); + +Console.WriteLine($"\nTotal unique directory names: {dirCounts.Count}"); +Console.WriteLine($"Total disk usage: {Util.BytesToString(totalSize)} (est. 
usage: {Util.BytesToString(totalRealSize)})"); + +static class Glibc +{ + // to get filesystem entry size for directories + [DllImport("libc", SetLastError = true)] + public static extern int stat(string path, out Stat buf); +} + +[StructLayout(LayoutKind.Sequential)] +public struct Stat +{ + public ulong st_dev; + public ulong st_ino; + public ulong st_nlink; + public uint st_mode; + public uint st_uid; + public uint st_gid; + public ulong st_rdev; + public long st_size; + public long st_blksize; + public long st_blocks; + public long st_atime; + public ulong st_atime_nsec; + public long st_mtime; + public ulong st_mtime_nsec; + public long st_ctime; + public ulong st_ctime_nsec; + public long __unused1; + public long __unused2; + public long __unused3; +} \ No newline at end of file diff --git a/scripts/countExts.cs b/scripts/countExts.cs new file mode 100755 index 000000000..af448edfe --- /dev/null +++ b/scripts/countExts.cs @@ -0,0 +1,91 @@ +#!/usr/bin/env dotnet +#:property Nullable=enable +#:property PublishAOT=false +#:package ArcaneLibs@1.0.0-preview.20251207* + +using ArcaneLibs; +using System; +using System.IO; +using System.Collections.Generic; +using System.Linq; + +if(args.Length == 0) +{ + Console.WriteLine("Usage: countExts [options] "); + Console.WriteLine("Options:"); + Console.WriteLine(" --size Sort by total size per extension (descending)"); + Console.WriteLine(" --real-size Sort by estimated disk usage per extension (descending)"); + Console.WriteLine(" --ext Sort by extension name (ascending)"); + Console.WriteLine(" --count Sort by file count per extension (ascending)"); + Console.WriteLine(" --by-filename Use full filename instead of extension for counting"); + Console.WriteLine(" --double-ext Consider double extensions (e.g., .test.js)"); + Console.WriteLine(" --filename-fallback Use full filename as extension if no extension found"); + return; +} + +// undo chdir by dotnet +Environment.CurrentDirectory = Environment.GetEnvironmentVariable("PWD") ?? Environment.CurrentDirectory; + +Dictionary extCounts = []; +Dictionary extSizes = []; +Dictionary extRealSizes = []; +long totalSize = 0; +long totalRealSize = 0; + +void CountExtensions(string path) +{ + foreach (var file in Directory.GetFiles(path)) + { + string ext = args.Contains("--by-filename") ? Path.GetFileName(file).ToLower() : Path.GetExtension(file).ToLower(); + + // handle double extensions, ie. .test.js, .min.js etc + if (args.Contains("--double-ext") && Path.GetFileName(file).Count(c => c == '.') >= 2) + { + var fname = Path.GetFileNameWithoutExtension(file); + var secondExt = Path.GetExtension(fname).ToLower(); + if (!string.IsNullOrEmpty(secondExt)) + ext = secondExt + ext; + } + + if(string.IsNullOrEmpty(ext)) + ext = args.Contains("--filename-fallback") ? 
Path.GetFileName(file).ToLower() : ""; + + if (!extCounts.ContainsKey(ext)) + extCounts[ext] = 0; + extCounts[ext]++; + + var fi = new FileInfo(file); + if (!extSizes.ContainsKey(ext)) + extSizes[ext] = 0; + extSizes[ext] += fi.Length; + totalSize += fi.Length; + + // Assuming 4KiB block size + if (!extRealSizes.ContainsKey(ext)) + extRealSizes[ext] = 0; + extRealSizes[ext] += (((fi.Length + 4095) / 4096) * 4096); + totalRealSize += (((fi.Length + 4095) / 4096) * 4096); + } + + foreach (var dir in Directory.GetDirectories(path)) CountExtensions(dir); +} + +CountExtensions(args.First(a=>!a.StartsWith("--"))); +var extColWidth = extCounts.Max(k => k.Key.Length) + 1; +var numColWidth = extCounts.Max(k=>k.Value.ToString().Length) + 1; + +var sortedExts = extCounts.OrderByDescending(kvp => kvp.Value); + +if (args.Contains("--size")) + sortedExts = extCounts.OrderByDescending(kvp => extSizes[kvp.Key]); +if (args.Contains("--real-size")) + sortedExts = extCounts.OrderByDescending(kvp => extRealSizes[kvp.Key]); +if (args.Contains("--ext")) + sortedExts = extCounts.OrderBy(kvp => kvp.Key); +if (args.Contains("--count")) + sortedExts = extCounts.OrderBy(kvp => kvp.Value); + +foreach (var kvp in sortedExts) Console.WriteLine($"{kvp.Value.ToString().PadLeft(numColWidth)} {kvp.Key.PadRight(extColWidth)} Total Size: {Util.BytesToString(extSizes[kvp.Key]).PadRight(12)} Est. usage: {Util.BytesToString(extRealSizes[kvp.Key])}"); + +Console.WriteLine($"\nTotal unique extensions: {extCounts.Count}"); +Console.WriteLine($"Total disk usage: {Util.BytesToString(totalSize)} (est. usage: {Util.BytesToString(totalRealSize)})"); \ No newline at end of file From bdf08b758f1bc1f2f0fe2a06e878f517ca093f7c Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 07:53:56 +0100 Subject: [PATCH 005/151] Nix: remove dependency on postgres service if host isnt running it --- nix/modules/default/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index d39919c32..093087fa2 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -223,7 +223,8 @@ in systemd.services.spacebar-apply-migrations = makeServerTsService { description = "Spacebar Server - Apply DB migrations"; - after = [ "network-online.target" "postgresql.service" ]; + after = [ "network-online.target" ] ++ lib.optional config.services.postgresql.enable "postgresql.service"; + requires = lib.optional config.services.postgresql.enable "postgresql.service"; environment = builtins.mapAttrs (_: val: builtins.toString val) ( cfg.extraEnvironment // { From 8451b2602d90db4027c41f1fd6e5edf55fc7f0d7 Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 08:27:10 +0100 Subject: [PATCH 006/151] network-online.target -> network.target? 
--- nix/modules/default/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index 093087fa2..a67d15620 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -223,7 +223,7 @@ in systemd.services.spacebar-apply-migrations = makeServerTsService { description = "Spacebar Server - Apply DB migrations"; - after = [ "network-online.target" ] ++ lib.optional config.services.postgresql.enable "postgresql.service"; + after = [ "network.target" ] ++ lib.optional config.services.postgresql.enable "postgresql.service"; requires = lib.optional config.services.postgresql.enable "postgresql.service"; environment = builtins.mapAttrs (_: val: builtins.toString val) ( cfg.extraEnvironment From a2a6e1924612bbc6f8ccebf7f798e2e6e22c7489 Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 08:42:42 +0100 Subject: [PATCH 007/151] Set 15s timeout for migrations --- nix/modules/default/default.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index a67d15620..d2134061e 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -237,6 +237,9 @@ in ExecStart = "${cfg.package}/bin/apply-migrations"; Type = "oneshot"; RemainAfterExit = true; + TimeoutStartSec = 15; + RestartSec = 1; + StartLimitBurst = 15; }; }; From d91d7f1ea70fb79be961f7a1abd4cc25a3e178fd Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 09:25:51 +0100 Subject: [PATCH 008/151] disable services as a test --- nix/modules/default/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index d2134061e..d32e855c5 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -106,7 +106,7 @@ in lib.recursiveUpdate (lib.recursiveUpdate { documentation = [ "https://docs.spacebar.chat/" ]; - wantedBy = [ "multi-user.target" ]; +# wantedBy = [ "multi-user.target" ]; wants = [ "network-online.target" ]; after = [ "network-online.target" ]; environment = From 0cc9cfab91179ea32dd8144be1c3b41c3072fa27 Mon Sep 17 00:00:00 2001 From: Rory& Date: Wed, 21 Jan 2026 09:33:48 +0100 Subject: [PATCH 009/151] reenable wantedby --- nix/modules/default/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index d32e855c5..d2134061e 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -106,7 +106,7 @@ in lib.recursiveUpdate (lib.recursiveUpdate { documentation = [ "https://docs.spacebar.chat/" ]; -# wantedBy = [ "multi-user.target" ]; + wantedBy = [ "multi-user.target" ]; wants = [ "network-online.target" ]; after = [ "network-online.target" ]; environment = From ec4222ee77b2a08452192bb905423fdb5ecf41c1 Mon Sep 17 00:00:00 2001 From: MathMan05 Date: Wed, 21 Jan 2026 12:11:10 -0600 Subject: [PATCH 010/151] bot fix --- src/util/util/Permissions.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util/util/Permissions.ts b/src/util/util/Permissions.ts index eb656f629..4cd6f89f6 100644 --- a/src/util/util/Permissions.ts +++ b/src/util/util/Permissions.ts @@ -139,7 +139,7 @@ export class Permissions extends BitField { static rolePermission(roles: Role[]) { // adds all permissions of all roles together (Bit OR) - return roles.reduce((permission, role) => permission | BigInt(role.permissions), 
BigInt(0)); + return roles.reduce((permission, role) => permission | BigInt(role.permissions || "0"), BigInt(0)); } static finalPermission({ From 88500e5fd3352dd092e452a72068c8e18028b826 Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 07:34:16 +0100 Subject: [PATCH 011/151] WIP: StopForumSpam support --- src/util/util/networking/index.ts | 1 + .../stopforumspam/StopForumSpamClient.ts | 131 ++++++++++++++++++ 2 files changed, 132 insertions(+) create mode 100644 src/util/util/networking/stopforumspam/StopForumSpamClient.ts diff --git a/src/util/util/networking/index.ts b/src/util/util/networking/index.ts index 717ba2a68..62e20d86b 100644 --- a/src/util/util/networking/index.ts +++ b/src/util/util/networking/index.ts @@ -17,3 +17,4 @@ */ export * from "./abuseipdb"; export * from "./ipdata"; +export * from "./stopforumspam/StopForumSpamClient"; diff --git a/src/util/util/networking/stopforumspam/StopForumSpamClient.ts b/src/util/util/networking/stopforumspam/StopForumSpamClient.ts new file mode 100644 index 000000000..7767915ac --- /dev/null +++ b/src/util/util/networking/stopforumspam/StopForumSpamClient.ts @@ -0,0 +1,131 @@ +/* + Spacebar: A FOSS re-implementation and extension of the Discord.com backend. + Copyright (C) 2025 Spacebar and Spacebar Contributors + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ + +import { DateBuilder } from "@spacebar/util"; + +// https://www.stopforumspam.com/usage +export class StopForumSpamClient { + private static stopForumSpamIpCache: Map< + string, + { + data: StopForumSpamResponse["ip"]; + expires: number; + } + > = new Map(); + private static stopForumSpamEmailCache: Map< + string, + { + data: StopForumSpamResponse["email"]; + expires: number; + } + > = new Map(); + private static stopForumSpamUsernameCache: Map< + string, + { + data: StopForumSpamResponse["username"]; + expires: number; + } + > = new Map(); + + public static async checkAsync(email?: string, ipAddress?: string, username?: string): Promise { + const params = new URLSearchParams(); + const cachedResults: StopForumSpamResponse = { success: 1 }; + if (email) { + const cachedEmail = StopForumSpamClient.stopForumSpamEmailCache.get(email); + if (cachedEmail && cachedEmail.expires > Date.now()) cachedResults.email = cachedEmail.data; + else params.append("email", email); + } + if (ipAddress) { + const cachedIp = StopForumSpamClient.stopForumSpamIpCache.get(ipAddress); + if (cachedIp && cachedIp.expires > Date.now()) cachedResults.ip = cachedIp.data; + else params.append("ip", ipAddress); + } + if (username) { + const cachedUsername = StopForumSpamClient.stopForumSpamUsernameCache.get(username); + if (cachedUsername && cachedUsername.expires > Date.now()) cachedResults.username = cachedUsername.data; + else params.append("username", username); + } + + if (params.toString() === "") { + // We don't need to fetch anything... 
+ console.log("[StopForumSpamClient] Using cached results for all parameters:", { email, ipAddress, username }); + return cachedResults; + } + + const response = await fetch(`https://api.stopforumspam.org/api?${params.toString()}&json&confidence`, { + method: "GET", + }); + + if (!response.ok) { + console.error(`StopForumSpam API request failed with status ${response.status}`); + console.error(await response.text()); + throw new Error(`StopForumSpam API request failed with status ${response.status}`); + } + + const data = (await response.json()) as StopForumSpamResponse; + if (data.success !== 1) { + console.error("StopForumSpam API request was not successful", data); + throw new Error("StopForumSpam API request was not successful"); + } + + if (data.ip) + StopForumSpamClient.stopForumSpamIpCache.set(data.ip.value, { + data: data.ip, + expires: new DateBuilder().addHours(12).buildTimestamp(), + }); + + if (data.email) + StopForumSpamClient.stopForumSpamEmailCache.set(data.email.value, { + data: data.email, + expires: new DateBuilder().addHours(12).buildTimestamp(), + }); + + if (data.username) + StopForumSpamClient.stopForumSpamUsernameCache.set(data.username.value, { + data: data.username, + expires: new DateBuilder().addHours(12).buildTimestamp(), + }); + + return data; + } +} +export interface StopForumSpamResponse { + success: 0 | 1; + ip?: { + value: string; + appears: 0 | 1; + lastseen: string; + frequency: number; + confidence?: number; + delegated: string; + }; + email?: { + value: string; + appears: 0 | 1; + lastseen: string; + frequency: number; + confidence?: number; + }; + username?: { + value: string; + appears: 0 | 1; + lastseen: string; + frequency: number; + confidence?: number; + }; +} From 67318a4e9217e72b0061b0280f8ffbe9c26fc6f8 Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 07:35:29 +0100 Subject: [PATCH 012/151] Nix: dont depend on network.target --- nix/modules/default/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index d2134061e..4d4774e91 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -223,7 +223,7 @@ in systemd.services.spacebar-apply-migrations = makeServerTsService { description = "Spacebar Server - Apply DB migrations"; - after = [ "network.target" ] ++ lib.optional config.services.postgresql.enable "postgresql.service"; + after = lib.optional config.services.postgresql.enable "postgresql.service"; requires = lib.optional config.services.postgresql.enable "postgresql.service"; environment = builtins.mapAttrs (_: val: builtins.toString val) ( cfg.extraEnvironment From d8e2870e543101bdfb97aba06ec26913451b52c9 Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 07:41:26 +0100 Subject: [PATCH 013/151] Nix: dont set after/requires at all --- nix/modules/default/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nix/modules/default/default.nix b/nix/modules/default/default.nix index 4d4774e91..6bd1045a3 100644 --- a/nix/modules/default/default.nix +++ b/nix/modules/default/default.nix @@ -223,8 +223,8 @@ in systemd.services.spacebar-apply-migrations = makeServerTsService { description = "Spacebar Server - Apply DB migrations"; - after = lib.optional config.services.postgresql.enable "postgresql.service"; - requires = lib.optional config.services.postgresql.enable "postgresql.service"; +# after = lib.optional config.services.postgresql.enable "postgresql.service"; +# requires = 
lib.optional config.services.postgresql.enable "postgresql.service"; environment = builtins.mapAttrs (_: val: builtins.toString val) ( cfg.extraEnvironment // { From b3a595f25d0a99fcd440e3a7c2013abaf1a07074 Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 08:31:06 +0100 Subject: [PATCH 014/151] Apply-migrations: retry --- src/apply-migrations.ts | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/src/apply-migrations.ts b/src/apply-migrations.ts index 94f86320e..d43f67bf4 100644 --- a/src/apply-migrations.ts +++ b/src/apply-migrations.ts @@ -10,8 +10,22 @@ process.env.DB_LOGGING = "true"; import { closeDatabase, initDatabase } from "@spacebar/util"; -initDatabase().then(() => { - closeDatabase().then((r) => { - console.log("Successfully applied migrations!"); - }); -}); +async function main() { + let success = false; + while (!success) { + try { + await initDatabase().then(async () => { + await closeDatabase().then(async () => { + console.log("Successfully applied migrations!"); + success = true; + }); + }); + } catch (e) { + console.error("Failed to apply migrations, retrying in 2s...", e); + await new Promise((res) => setTimeout(res, 2000)); + await main(); + } + } +} + +main().then((r) => console.log("meow")); From 935f7960a676a11d60d4a5e3990c1920d747790e Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 19:42:24 +0100 Subject: [PATCH 015/151] Move health endpoints to /, deprecate /api/-. Fixes #1089 --- assets/openapi.json | 2 ++ scripts/schemaExclusions.json | 1 + src/api/Server.ts | 11 ++++++++++- src/api/routes/-/healthz.ts | 2 +- src/api/routes/-/readyz.ts | 2 +- 5 files changed, 15 insertions(+), 3 deletions(-) diff --git a/assets/openapi.json b/assets/openapi.json index fbadb7ab9..6f5112a0f 100644 --- a/assets/openapi.json +++ b/assets/openapi.json @@ -26001,6 +26001,7 @@ }, "/-/readyz/": { "get": { + "deprecated": true, "responses": { "default": { "description": "No description available" @@ -26019,6 +26020,7 @@ }, "/-/healthz/": { "get": { + "deprecated": true, "responses": { "default": { "description": "No description available" diff --git a/scripts/schemaExclusions.json b/scripts/schemaExclusions.json index 06ea1e74e..f25e8f071 100644 --- a/scripts/schemaExclusions.json +++ b/scripts/schemaExclusions.json @@ -48,6 +48,7 @@ "AbuseIpDbBlacklistResponse", "AbuseIpDbCheckResponse", "IpDataIpLookupResponse", + "StopForumSpamResponse", "recaptchaResponse", "hcaptchaResponse", "KeySchema" diff --git a/src/api/Server.ts b/src/api/Server.ts index 5fab5e5b5..bac34d619 100644 --- a/src/api/Server.ts +++ b/src/api/Server.ts @@ -16,7 +16,7 @@ along with this program. If not, see . 
*/ -import { Config, ConnectionConfig, ConnectionLoader, Email, JSONReplacer, WebAuthn, initDatabase, initEvent, registerRoutes } from "@spacebar/util"; +import { Config, ConnectionConfig, ConnectionLoader, Email, JSONReplacer, WebAuthn, initDatabase, initEvent, registerRoutes, getDatabase } from "@spacebar/util"; import { Authentication, CORS, ImageProxy, BodyParser, ErrorHandler, initRateLimits, initTranslation } from "./middlewares"; import { Request, Response, Router } from "express"; import { Server, ServerOptions } from "lambert-server"; @@ -24,6 +24,7 @@ import morgan from "morgan"; import path from "path"; import { red } from "picocolors"; import { initInstance } from "./util/handlers/Instance"; +import { route } from "./util"; const ASSETS_FOLDER = path.join(__dirname, "..", "..", "assets"); const PUBLIC_ASSETS_FOLDER = path.join(ASSETS_FOLDER, "public"); @@ -182,6 +183,14 @@ export class SpacebarServer extends Server { }); }); + function isReady(req: Request, res: Response) { + if (!getDatabase()) return res.sendStatus(503); + return res.sendStatus(200); + } + + app.get("/readyz", route({ description: "Get the ready state of the server" }), isReady); + app.get("/healthz", route({ description: "Get the ready state of the server" }), isReady); + this.app.use(ErrorHandler); ConnectionLoader.loadConnections(); diff --git a/src/api/routes/-/healthz.ts b/src/api/routes/-/healthz.ts index 886473cf5..de45a29c4 100644 --- a/src/api/routes/-/healthz.ts +++ b/src/api/routes/-/healthz.ts @@ -22,7 +22,7 @@ import { getDatabase } from "@spacebar/util"; const router = Router({ mergeParams: true }); -router.get("/", route({}), (req: Request, res: Response) => { +router.get("/", route({ deprecated: true }), (req: Request, res: Response) => { if (!getDatabase()) return res.sendStatus(503); return res.sendStatus(200); diff --git a/src/api/routes/-/readyz.ts b/src/api/routes/-/readyz.ts index 886473cf5..de45a29c4 100644 --- a/src/api/routes/-/readyz.ts +++ b/src/api/routes/-/readyz.ts @@ -22,7 +22,7 @@ import { getDatabase } from "@spacebar/util"; const router = Router({ mergeParams: true }); -router.get("/", route({}), (req: Request, res: Response) => { +router.get("/", route({ deprecated: true }), (req: Request, res: Response) => { if (!getDatabase()) return res.sendStatus(503); return res.sendStatus(200); From cc2bcef03b4899ed5ad863a51b36447c4493c879 Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 20:25:33 +0100 Subject: [PATCH 016/151] CDN: use central cache middleware. 
Closes #1455 --- src/cdn/routes/attachments.ts | 4 ++-- src/cdn/routes/avatars.ts | 7 +++---- src/cdn/routes/badge-icons.ts | 4 ++-- src/cdn/routes/embed.ts | 7 +++---- src/cdn/routes/guild-profiles.ts | 7 +++---- src/cdn/routes/role-icons.ts | 7 +++---- src/cdn/util/cache.ts | 24 ++++++++++++++++++++++++ 7 files changed, 40 insertions(+), 20 deletions(-) create mode 100644 src/cdn/util/cache.ts diff --git a/src/cdn/routes/attachments.ts b/src/cdn/routes/attachments.ts index db2b95031..2bbf39b02 100644 --- a/src/cdn/routes/attachments.ts +++ b/src/cdn/routes/attachments.ts @@ -24,6 +24,7 @@ import { multer } from "../util/multer"; import { storage } from "../util/Storage"; import { CloudAttachment } from "../../util/entities/CloudAttachment"; import { fileTypeFromBuffer } from "file-type"; +import { cache } from "../util/cache"; const router = Router({ mergeParams: true }); @@ -69,7 +70,7 @@ router.post("/:channel_id", multer.single("file"), async (req: Request, res: Res return res.json(file); }); -router.get("/:channel_id/:id/:filename", async (req: Request, res: Response) => { +router.get("/:channel_id/:id/:filename", cache, async (req: Request, res: Response) => { const { channel_id, id, filename } = req.params; // const { format } = req.query; @@ -100,7 +101,6 @@ router.get("/:channel_id/:id/:filename", async (req: Request, res: Response) => } res.set("Content-Type", content_type); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); diff --git a/src/cdn/routes/avatars.ts b/src/cdn/routes/avatars.ts index c770007a7..f2aae6255 100644 --- a/src/cdn/routes/avatars.ts +++ b/src/cdn/routes/avatars.ts @@ -23,6 +23,7 @@ import { fileTypeFromBuffer } from "file-type"; import { HTTPError } from "lambert-server"; import crypto from "crypto"; import { multer } from "../util/multer"; +import { cache } from "../util/cache"; // TODO: check premium and animated pfp are allowed in the config // TODO: generate different sizes of icon @@ -60,7 +61,7 @@ router.post("/:user_id", multer.single("file"), async (req: Request, res: Respon }); }); -router.get("/:user_id", async (req: Request, res: Response) => { +router.get("/:user_id", cache, async (req: Request, res: Response) => { let { user_id } = req.params; user_id = user_id.split(".")[0]; // remove .file extension const path = `avatars/${user_id}`; @@ -70,7 +71,6 @@ router.get("/:user_id", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); @@ -86,12 +86,11 @@ export const getAvatar = async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }; -router.get("/:user_id/:hash", getAvatar); +router.get("/:user_id/:hash", cache, getAvatar); router.delete("/:user_id/:id", async (req: Request, res: Response) => { if (req.headers.signature !== Config.get().security.requestSignature) throw new HTTPError("Invalid request signature"); diff --git a/src/cdn/routes/badge-icons.ts b/src/cdn/routes/badge-icons.ts index ea7314a94..2d202d076 100644 --- a/src/cdn/routes/badge-icons.ts +++ b/src/cdn/routes/badge-icons.ts @@ -20,10 +20,11 @@ import { Router, Response, Request } from "express"; import { storage } from "../util/Storage"; import { HTTPError } from "lambert-server"; import { fileTypeFromBuffer } from "file-type"; +import { cache } from 
"../util/cache"; const router = Router({ mergeParams: true }); -router.get("/:badge_id", async (req: Request, res: Response) => { +router.get("/:badge_id", cache, async (req: Request, res: Response) => { const { badge_id } = req.params; const path = `badge-icons/${badge_id}`; @@ -32,7 +33,6 @@ router.get("/:badge_id", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000, must-revalidate"); return res.send(file); }); diff --git a/src/cdn/routes/embed.ts b/src/cdn/routes/embed.ts index 2a08da5a9..bc764389a 100644 --- a/src/cdn/routes/embed.ts +++ b/src/cdn/routes/embed.ts @@ -21,6 +21,7 @@ import fs from "fs/promises"; import { HTTPError } from "lambert-server"; import { join } from "path"; import { fileTypeFromBuffer } from "file-type"; +import { cache } from "../util/cache"; const defaultAvatarHashMap = new Map([ ["0", "4a8562cf00887030c416d3ec2d46385a"], @@ -58,7 +59,7 @@ async function getFile(path: string) { } } -router.get("/avatars/:id", async (req: Request, res: Response) => { +router.get("/avatars/:id", cache, async (req: Request, res: Response) => { let { id } = req.params; id = id.split(".")[0]; // remove .file extension const hash = defaultAvatarHashMap.get(id); @@ -70,12 +71,11 @@ router.get("/avatars/:id", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); -router.get("/group-avatars/:id", async (req: Request, res: Response) => { +router.get("/group-avatars/:id", cache, async (req: Request, res: Response) => { let { id } = req.params; id = id.split(".")[0]; // remove .file extension const hash = defaultGroupDMAvatarHashMap.get(id); @@ -87,7 +87,6 @@ router.get("/group-avatars/:id", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); diff --git a/src/cdn/routes/guild-profiles.ts b/src/cdn/routes/guild-profiles.ts index 6660b7208..f605ad7e6 100644 --- a/src/cdn/routes/guild-profiles.ts +++ b/src/cdn/routes/guild-profiles.ts @@ -23,6 +23,7 @@ import { HTTPError } from "lambert-server"; import { multer } from "../util/multer"; import { storage } from "../util/Storage"; import { fileTypeFromBuffer } from "file-type"; +import { cache } from "../util/cache"; // TODO: check premium and animated pfp are allowed in the config // TODO: generate different sizes of icon @@ -60,7 +61,7 @@ router.post("/", multer.single("file"), async (req: Request, res: Response) => { }); }); -router.get("/", async (req: Request, res: Response) => { +router.get("/", cache, async (req: Request, res: Response) => { const { guild_id } = req.params; let { user_id } = req.params; user_id = user_id.split(".")[0]; // remove .file extension @@ -71,12 +72,11 @@ router.get("/", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); -router.get("/:hash", async (req: Request, res: Response) => { +router.get("/:hash", cache, async (req: Request, res: Response) => { const { guild_id, user_id } = req.params; let { hash } = req.params; hash = hash.split(".")[0]; // remove .file extension @@ -87,7 +87,6 @@ router.get("/:hash", async (req: Request, res: 
Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000"); return res.send(file); }); diff --git a/src/cdn/routes/role-icons.ts b/src/cdn/routes/role-icons.ts index 1e00f9877..1f0c5b9fc 100644 --- a/src/cdn/routes/role-icons.ts +++ b/src/cdn/routes/role-icons.ts @@ -23,6 +23,7 @@ import { fileTypeFromBuffer } from "file-type"; import { HTTPError } from "lambert-server"; import crypto from "crypto"; import { multer } from "../util/multer"; +import { cache } from "../util/cache"; //Role icons ---> avatars.ts modified @@ -59,7 +60,7 @@ router.post("/:role_id", multer.single("file"), async (req: Request, res: Respon }); }); -router.get("/:role_id", async (req: Request, res: Response) => { +router.get("/:role_id", cache, async (req: Request, res: Response) => { const { role_id } = req.params; //role_id = role_id.split(".")[0]; // remove .file extension const path = `role-icons/${role_id}`; @@ -69,12 +70,11 @@ router.get("/:role_id", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000, must-revalidate"); return res.send(file); }); -router.get("/:role_id/:hash", async (req: Request, res: Response) => { +router.get("/:role_id/:hash", cache, async (req: Request, res: Response) => { const { role_id, hash } = req.params; //hash = hash.split(".")[0]; // remove .file extension const requested_extension = hash.split(".")[1]; @@ -92,7 +92,6 @@ router.get("/:role_id/:hash", async (req: Request, res: Response) => { const type = await fileTypeFromBuffer(file); res.set("Content-Type", type?.mime); - res.set("Cache-Control", "public, max-age=31536000, must-revalidate"); return res.send(file); }); diff --git a/src/cdn/util/cache.ts b/src/cdn/util/cache.ts new file mode 100644 index 000000000..3c3ea8fb2 --- /dev/null +++ b/src/cdn/util/cache.ts @@ -0,0 +1,24 @@ +/* + Spacebar: A FOSS re-implementation and extension of the Discord.com backend. + Copyright (C) 2025 Spacebar and Spacebar Contributors + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ + +import { NextFunction, Response, Request } from "express"; + +export function cache(req: Request, res: Response, next: NextFunction) { + const cacheDuration = 21600; // 6 hours + res.setHeader("Cache-Control", `public, max-age=${cacheDuration}, s-maxage=${cacheDuration}, immutable`); +} From 6eb1d2593fc2649501dcd77a3d0fc2d6488f5776 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Jan 2026 02:02:20 +0000 Subject: [PATCH 017/151] Bump undici in the npm_and_yarn group across 1 directory Bumps the npm_and_yarn group with 1 update in the / directory: [undici](https://github.com/nodejs/undici). 
Updates `undici` from 7.16.0 to 7.18.2 - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v7.16.0...v7.18.2) --- updated-dependencies: - dependency-name: undici dependency-version: 7.18.2 dependency-type: indirect dependency-group: npm_and_yarn ... Signed-off-by: dependabot[bot] --- package-lock.json | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/package-lock.json b/package-lock.json index ae5e58252..5c3e3b187 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2830,7 +2830,6 @@ "integrity": "sha512-sKYVuV7Sv9fbPIt/442koC7+IIwK5olP1KWeD88e/idgoJqDm3JV/YUiPwkoKK92ylff2MGxSz1CSjsXelx0YA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^5.0.0", @@ -2948,7 +2947,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.4.tgz", "integrity": "sha512-vnDVpYPMzs4wunl27jHrfmwojOGKya0xyM3sH+UE5iv5uPS6vX7UIoh6m+vQc5LGBq52HBKPIn/zcSZVzeDEZg==", "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~7.16.0" } @@ -3084,7 +3082,6 @@ "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.51.0", "@typescript-eslint/types": "8.51.0", @@ -3484,7 +3481,6 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "devOptional": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -4723,7 +4719,6 @@ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -7193,7 +7188,6 @@ "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", "license": "MIT", - "peer": true, "dependencies": { "pg-connection-string": "^2.9.1", "pg-pool": "^3.10.1", @@ -7267,7 +7261,6 @@ "resolved": "https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.10.3.tgz", "integrity": "sha512-h2utrzpOIzeT9JfaqfvBbVuvCfBjH86jNfVrGGTbyepKAIOyTfDew0lAt8bbJjs9n/I5bGDl7S2sx6h5hPyJxw==", "license": "MIT", - "peer": true, "dependencies": { "pg-cursor": "^2.15.3" }, @@ -7433,7 +7426,6 @@ "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "dev": true, "license": "MIT", - "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -8286,7 +8278,6 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -8402,7 +8393,6 @@ "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", "devOptional": true, "license": "MIT", - "peer": true, "dependencies": { "@cspotcode/source-map-support": "^0.8.0", "@tsconfig/node10": "^1.0.7", @@ -8667,7 +8657,6 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "devOptional": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8740,9 +8729,9 @@ } }, "node_modules/undici": { - "version": 
"7.16.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz", - "integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.18.2.tgz", + "integrity": "sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==", "license": "MIT", "engines": { "node": ">=20.18.1" From 31345651dbbebf0babe33667a37a297f731d24dc Mon Sep 17 00:00:00 2001 From: Rory& Date: Thu, 22 Jan 2026 21:34:01 +0100 Subject: [PATCH 018/151] Make openapi.js work offline - still needs conditionals fixed though --- assets/openapi.json | 192 ------------------ default.nix | 4 +- scripts/openapi.js | 15 +- src/api/routes/-/healthz.ts | 2 +- src/api/routes/-/readyz.ts | 2 +- src/api/routes/applications/detectable.ts | 1 + src/api/routes/auth/fingerprint.ts | 19 +- .../auth/generate-registration-tokens.ts | 1 + src/api/routes/auth/mfa/totp.ts | 1 + src/api/routes/auth/mfa/webauthn.ts | 1 + src/api/routes/auth/whoami.ts | 1 + src/api/routes/beaker.ts | 1 + src/api/routes/guild-recommendations.ts | 1 + src/api/routes/ping.ts | 1 + src/api/routes/policies/instance/config.ts | 1 + src/api/routes/policies/instance/domains.ts | 1 + src/api/routes/policies/instance/index.ts | 1 + src/api/routes/policies/instance/limits.ts | 1 + src/api/routes/policies/stats.ts | 1 + src/api/routes/reporting/index.ts | 2 + .../scheduled-maintenances/upcoming.json.ts | 18 +- src/api/routes/track.ts | 14 +- src/api/routes/users/@me/settings-proto/1.ts | 4 + src/api/routes/users/@me/settings-proto/2.ts | 4 + src/api/util/handlers/route.ts | 1 + 25 files changed, 72 insertions(+), 218 deletions(-) diff --git a/assets/openapi.json b/assets/openapi.json index 6f5112a0f..298ad18ec 100644 --- a/assets/openapi.json +++ b/assets/openapi.json @@ -13639,12 +13639,6 @@ ], "tags": [ "users" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] }, "patch": { @@ -13677,12 +13671,6 @@ }, "tags": [ "users" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -13795,12 +13783,6 @@ ], "tags": [ "users" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] }, "patch": { @@ -13833,12 +13815,6 @@ }, "tags": [ "users" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -15797,12 +15773,6 @@ }, "tags": [ "track" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16109,12 +16079,6 @@ }, "tags": [ "scheduled-maintenances" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16247,12 +16211,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16291,12 +16249,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16336,12 +16288,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16380,12 +16326,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16425,12 +16365,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16469,12 +16403,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16514,12 +16442,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": 
"Spacebar-only", - "color": "red" - } ] } }, @@ -16558,12 +16480,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16603,12 +16519,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16647,12 +16557,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16692,12 +16596,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16736,12 +16634,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16781,12 +16673,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16825,12 +16711,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16870,12 +16750,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16914,12 +16788,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -16959,12 +16827,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -17003,12 +16865,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -17048,12 +16904,6 @@ ], "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -17092,12 +16942,6 @@ }, "tags": [ "reporting" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -21828,12 +21672,6 @@ }, "tags": [ "guild-recommendations" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -24485,12 +24323,6 @@ }, "tags": [ "beaker" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -24800,12 +24632,6 @@ }, "tags": [ "auth" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -24845,12 +24671,6 @@ }, "tags": [ "auth" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -25026,12 +24846,6 @@ }, "tags": [ "auth" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, @@ -25156,12 +24970,6 @@ }, "tags": [ "applications" - ], - "x-badges": [ - { - "label": "Spacebar-only", - "color": "red" - } ] } }, diff --git a/default.nix b/default.nix index af333012d..691162a6b 100644 --- a/default.nix +++ b/default.nix @@ -18,6 +18,7 @@ let ./tsconfig.json ./assets ./patches + ./scripts ] ) ); @@ -41,7 +42,7 @@ pkgs.buildNpmPackage { npmDeps = pkgs.importNpmLock { npmRoot = filteredSrc; }; npmConfigHook = pkgs.importNpmLock.npmConfigHook; - npmBuildScript = "build:src:tsgo"; + npmBuildScript = "build:tsgo"; makeCacheWritable = true; nativeBuildInputs = with pkgs; [ (pkgs.python3.withPackages (ps: with ps; [ setuptools ])) @@ -64,6 +65,7 @@ pkgs.buildNpmPackage { # remove packages not needed for production, or at least try to... 
npm prune --omit dev --no-save $npmInstallFlags "''${npmInstallFlagsArray[@]}" $npmFlags "''${npmFlagsArray[@]}" rm -v dist/src.tsbuildinfo + rm -rv scripts time ${./nix/trimNodeModules.sh} # Copy outputs diff --git a/scripts/openapi.js b/scripts/openapi.js index 5afee152a..86d502e24 100644 --- a/scripts/openapi.js +++ b/scripts/openapi.js @@ -213,7 +213,7 @@ function apiRoutes(missingRoutes) { obj.tags = [...new Set([...(obj.tags || []), getTag(p)])]; - if (missingRoutes.additional.includes(path.replace(/\/$/, ""))) { + if (route.spacebarOnly === true || missingRoutes?.additional.includes(path.replace(/\/$/, ""))) { obj["x-badges"] = [ { label: "Spacebar-only", @@ -231,12 +231,13 @@ function apiRoutes(missingRoutes) { async function main() { console.log("Generating OpenAPI Specification..."); - const routesRes = await fetch("https://github.com/spacebarchat/missing-routes/raw/main/missing.json", { - headers: { - Accept: "application/json", - }, - }); - const missingRoutes = await routesRes.json(); + // const routesRes = await fetch("https://github.com/spacebarchat/missing-routes/raw/main/missing.json", { + // headers: { + // Accept: "application/json", + // }, + // }); + // const missingRoutes = await routesRes.json(); + let missingRoutes = undefined; combineSchemas(schemas); apiRoutes(missingRoutes); diff --git a/src/api/routes/-/healthz.ts b/src/api/routes/-/healthz.ts index de45a29c4..90f6054d5 100644 --- a/src/api/routes/-/healthz.ts +++ b/src/api/routes/-/healthz.ts @@ -22,7 +22,7 @@ import { getDatabase } from "@spacebar/util"; const router = Router({ mergeParams: true }); -router.get("/", route({ deprecated: true }), (req: Request, res: Response) => { +router.get("/", route({ deprecated: true, spacebarOnly: true }), (req: Request, res: Response) => { if (!getDatabase()) return res.sendStatus(503); return res.sendStatus(200); diff --git a/src/api/routes/-/readyz.ts b/src/api/routes/-/readyz.ts index de45a29c4..90f6054d5 100644 --- a/src/api/routes/-/readyz.ts +++ b/src/api/routes/-/readyz.ts @@ -22,7 +22,7 @@ import { getDatabase } from "@spacebar/util"; const router = Router({ mergeParams: true }); -router.get("/", route({ deprecated: true }), (req: Request, res: Response) => { +router.get("/", route({ deprecated: true, spacebarOnly: true }), (req: Request, res: Response) => { if (!getDatabase()) return res.sendStatus(503); return res.sendStatus(200); diff --git a/src/api/routes/applications/detectable.ts b/src/api/routes/applications/detectable.ts index 83122e761..688a7d125 100644 --- a/src/api/routes/applications/detectable.ts +++ b/src/api/routes/applications/detectable.ts @@ -34,6 +34,7 @@ router.get( body: "ApplicationDetectableResponse", }, }, + spacebarOnly: false, // not part of public openapi }), async (req: Request, res: Response) => { // cache for 6 hours diff --git a/src/api/routes/auth/fingerprint.ts b/src/api/routes/auth/fingerprint.ts index a4a49d079..2a77d3c7f 100644 --- a/src/api/routes/auth/fingerprint.ts +++ b/src/api/routes/auth/fingerprint.ts @@ -20,10 +20,17 @@ import { createHash } from "node:crypto"; import { Snowflake } from "@spacebar/util"; import { Request, Response, Router } from "express"; const router = Router({ mergeParams: true }); -router.post("/", route({ responses: { 200: { body: "CreateFingerprintResponse" } } }), (req: Request, res: Response) => { - const snowflake = Snowflake.generate(); - return res.json({ - fingerprint: `${snowflake}.${createHash("sha512").update(snowflake).digest("base64")}`, - }); -}); +router.post( + "/", + route({ + 
responses: { 200: { body: "CreateFingerprintResponse" } }, + spacebarOnly: false, // not part of public openapi + }), + (req: Request, res: Response) => { + const snowflake = Snowflake.generate(); + return res.json({ + fingerprint: `${snowflake}.${createHash("sha512").update(snowflake).digest("base64")}`, + }); + }, +); export default router; diff --git a/src/api/routes/auth/generate-registration-tokens.ts b/src/api/routes/auth/generate-registration-tokens.ts index a6b70c65b..1493c2dcb 100644 --- a/src/api/routes/auth/generate-registration-tokens.ts +++ b/src/api/routes/auth/generate-registration-tokens.ts @@ -38,6 +38,7 @@ router.get( }, right: "CREATE_REGISTRATION_TOKENS", responses: { 200: { body: "GenerateRegistrationTokensResponse" } }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const count = req.query.count ? parseInt(req.query.count as string) : 1; diff --git a/src/api/routes/auth/mfa/totp.ts b/src/api/routes/auth/mfa/totp.ts index 366fb1020..d50f2d0ea 100644 --- a/src/api/routes/auth/mfa/totp.ts +++ b/src/api/routes/auth/mfa/totp.ts @@ -36,6 +36,7 @@ router.post( body: "APIErrorResponse", }, }, + spacebarOnly: false, // not part of public openapi }), async (req: Request, res: Response) => { // const { code, ticket, gift_code_sku_id, login_source } = diff --git a/src/api/routes/auth/mfa/webauthn.ts b/src/api/routes/auth/mfa/webauthn.ts index 3359767c3..8e6c5f039 100644 --- a/src/api/routes/auth/mfa/webauthn.ts +++ b/src/api/routes/auth/mfa/webauthn.ts @@ -41,6 +41,7 @@ router.post( 200: { body: "TokenResponse" }, 400: { body: "APIErrorResponse" }, }, + spacebarOnly: false, // not part of public openapi }), async (req: Request, res: Response) => { if (!WebAuthn.fido2) { diff --git a/src/api/routes/auth/whoami.ts b/src/api/routes/auth/whoami.ts index 4d28b9fab..513663761 100644 --- a/src/api/routes/auth/whoami.ts +++ b/src/api/routes/auth/whoami.ts @@ -30,6 +30,7 @@ router.get( body: "WhoAmIResponse", }, }, + spacebarOnly: true, }), /* interface Request { diff --git a/src/api/routes/beaker.ts b/src/api/routes/beaker.ts index 4eb2941a1..22c1904f4 100644 --- a/src/api/routes/beaker.ts +++ b/src/api/routes/beaker.ts @@ -28,6 +28,7 @@ router.post( responses: { 204: {}, }, + spacebarOnly: false, // Not part of public openapi }), (req: Request, res: Response) => { // TODO: diff --git a/src/api/routes/guild-recommendations.ts b/src/api/routes/guild-recommendations.ts index 38ba10257..37ce779e9 100644 --- a/src/api/routes/guild-recommendations.ts +++ b/src/api/routes/guild-recommendations.ts @@ -32,6 +32,7 @@ router.get( body: "GuildRecommendationsResponse", }, }, + spacebarOnly: false, // Not part of public openapi schema }), async (req: Request, res: Response) => { // const { limit, personalization_disabled } = req.query; diff --git a/src/api/routes/ping.ts b/src/api/routes/ping.ts index be737f840..2a5ec6aa5 100644 --- a/src/api/routes/ping.ts +++ b/src/api/routes/ping.ts @@ -30,6 +30,7 @@ router.get( body: "InstancePingResponse", }, }, + spacebarOnly: true, }), (req: Request, res: Response) => { const { general } = Config.get(); diff --git a/src/api/routes/policies/instance/config.ts b/src/api/routes/policies/instance/config.ts index 946ac8aa8..686ecd1e2 100755 --- a/src/api/routes/policies/instance/config.ts +++ b/src/api/routes/policies/instance/config.ts @@ -30,6 +30,7 @@ router.get( body: "Object", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const general = Config.get(); diff --git a/src/api/routes/policies/instance/domains.ts 
b/src/api/routes/policies/instance/domains.ts index 339eedb1b..3d3303708 100644 --- a/src/api/routes/policies/instance/domains.ts +++ b/src/api/routes/policies/instance/domains.ts @@ -29,6 +29,7 @@ router.get( body: "InstanceDomainsResponse", }, }, + spacebarOnly: true, }), (req: Request, res: Response) => { const { cdn, gateway, api } = Config.get(); diff --git a/src/api/routes/policies/instance/index.ts b/src/api/routes/policies/instance/index.ts index bb04475f9..31e80bdf0 100644 --- a/src/api/routes/policies/instance/index.ts +++ b/src/api/routes/policies/instance/index.ts @@ -29,6 +29,7 @@ router.get( body: "APIGeneralConfiguration", }, }, + spacebarOnly: true, }), (req: Request, res: Response) => { const { general } = Config.get(); diff --git a/src/api/routes/policies/instance/limits.ts b/src/api/routes/policies/instance/limits.ts index 375c72175..380d9e1a5 100644 --- a/src/api/routes/policies/instance/limits.ts +++ b/src/api/routes/policies/instance/limits.ts @@ -29,6 +29,7 @@ router.get( body: "APILimitsConfiguration", }, }, + spacebarOnly: true, }), (req: Request, res: Response) => { const { limits } = Config.get(); diff --git a/src/api/routes/policies/stats.ts b/src/api/routes/policies/stats.ts index 0edcfd68f..9090d787a 100644 --- a/src/api/routes/policies/stats.ts +++ b/src/api/routes/policies/stats.ts @@ -32,6 +32,7 @@ router.get( body: "APIErrorResponse", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { if (!Config.get().security.statsWorldReadable) { diff --git a/src/api/routes/reporting/index.ts b/src/api/routes/reporting/index.ts index dbbd0004a..c4ae61169 100644 --- a/src/api/routes/reporting/index.ts +++ b/src/api/routes/reporting/index.ts @@ -57,6 +57,7 @@ for (const type of Object.values(ReportMenuTypeNames)) { }, 204: {}, }, + spacebarOnly: false, // Maps to /reporting/menu/:id }), (req: Request, res: Response) => { // TODO: implement @@ -76,6 +77,7 @@ for (const type of Object.values(ReportMenuTypeNames)) { }, 204: {}, }, + spacebarOnly: false, // Maps to /reporting/:id }), (req: Request, res: Response) => { // TODO: implement diff --git a/src/api/routes/scheduled-maintenances/upcoming.json.ts b/src/api/routes/scheduled-maintenances/upcoming.json.ts index b21507444..bf8fa5587 100644 --- a/src/api/routes/scheduled-maintenances/upcoming.json.ts +++ b/src/api/routes/scheduled-maintenances/upcoming.json.ts @@ -20,11 +20,17 @@ import { Router, Request, Response } from "express"; import { route } from "@spacebar/api"; const router = Router({ mergeParams: true }); -router.get("/", route({}), (req: Request, res: Response) => { - res.json({ - page: {}, - scheduled_maintenances: {}, - }); -}); +router.get( + "/", + route({ + spacebarOnly: false, // not part of public openapi + }), + (req: Request, res: Response) => { + res.json({ + page: {}, + scheduled_maintenances: {}, + }); + }, +); export default router; diff --git a/src/api/routes/track.ts b/src/api/routes/track.ts index d32061388..fe33b5028 100644 --- a/src/api/routes/track.ts +++ b/src/api/routes/track.ts @@ -21,9 +21,15 @@ import { route } from "@spacebar/api"; const router = Router({ mergeParams: true }); -router.post("/", route({}), (req: Request, res: Response) => { - // TODO: - res.sendStatus(204); -}); +router.post( + "/", + route({ + spacebarOnly: false, // Not part of the public OpenAPI schema + }), + (req: Request, res: Response) => { + // TODO: + res.sendStatus(204); + }, +); export default router; diff --git a/src/api/routes/users/@me/settings-proto/1.ts 
b/src/api/routes/users/@me/settings-proto/1.ts index d54bdf361..6df652a45 100644 --- a/src/api/routes/users/@me/settings-proto/1.ts +++ b/src/api/routes/users/@me/settings-proto/1.ts @@ -40,6 +40,7 @@ router.get( description: "Whether to try to apply the settings update atomically (default false)", }, }, + spacebarOnly: false, // maps to /users/@me/settings-proto/1 }), async (req: Request, res: Response) => { const userSettings = await UserSettingsProtos.getOrDefault(req.user_id); @@ -59,6 +60,7 @@ router.patch( body: "SettingsProtoUpdateResponse", }, }, + spacebarOnly: false, // maps to /users/@me/settings-proto/1 }), async (req: Request, res: Response) => { const { settings, required_data_version } = req.body as SettingsProtoUpdateSchema; @@ -84,6 +86,7 @@ router.get( body: "SettingsProtoJsonResponse", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const userSettings = await UserSettingsProtos.getOrDefault(req.user_id); @@ -109,6 +112,7 @@ router.patch( description: "Whether to try to apply the settings update atomically (default false)", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const { settings, required_data_version } = req.body as SettingsProtoUpdateJsonSchema; diff --git a/src/api/routes/users/@me/settings-proto/2.ts b/src/api/routes/users/@me/settings-proto/2.ts index 809db8487..ac3280e91 100644 --- a/src/api/routes/users/@me/settings-proto/2.ts +++ b/src/api/routes/users/@me/settings-proto/2.ts @@ -40,6 +40,7 @@ router.get( description: "Whether to try to apply the settings update atomically (default false)", }, }, + spacebarOnly: false, // maps to /users/@me/settings-proto/2 }), async (req: Request, res: Response) => { const userSettings = await UserSettingsProtos.getOrDefault(req.user_id); @@ -59,6 +60,7 @@ router.patch( body: "SettingsProtoUpdateResponse", }, }, + spacebarOnly: false, // maps to /users/@me/settings-proto/2 }), async (req: Request, res: Response) => { const { settings, required_data_version } = req.body as SettingsProtoUpdateSchema; @@ -84,6 +86,7 @@ router.get( body: "SettingsProtoJsonResponse", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const userSettings = await UserSettingsProtos.getOrDefault(req.user_id); @@ -109,6 +112,7 @@ router.patch( description: "Whether to try to apply the settings update atomically (default false)", }, }, + spacebarOnly: true, }), async (req: Request, res: Response) => { const { settings, required_data_version } = req.body as SettingsProtoUpdateJsonSchema; diff --git a/src/api/util/handlers/route.ts b/src/api/util/handlers/route.ts index 7ae00a555..022a522a0 100644 --- a/src/api/util/handlers/route.ts +++ b/src/api/util/handlers/route.ts @@ -64,6 +64,7 @@ export interface RouteOptions { }; }; deprecated?: boolean; + spacebarOnly?: boolean; // test?: { // response?: RouteResponse; // body?: unknown; From 3812f47ac26c17d3523b020917afb359c626e4f4 Mon Sep 17 00:00:00 2001 From: dank074 Date: Thu, 22 Jan 2026 17:06:30 -0600 Subject: [PATCH 019/151] add rabbitmq reconnection logic with state recovery --- src/api/middlewares/RateLimit.ts | 20 +++- src/gateway/listener/listener.ts | 94 +++++++++++++----- src/util/util/Event.ts | 88 ++++++++++++----- src/util/util/RabbitMQ.ts | 161 ++++++++++++++++++++++++------- 4 files changed, 272 insertions(+), 91 deletions(-) diff --git a/src/api/middlewares/RateLimit.ts b/src/api/middlewares/RateLimit.ts index c749df6ad..8a8c17f6b 100644 --- a/src/api/middlewares/RateLimit.ts +++ b/src/api/middlewares/RateLimit.ts @@ 
-16,7 +16,7 @@ along with this program. If not, see . */ -import { Config, getRights, listenEvent } from "@spacebar/util"; +import { Config, getRights, listenEvent, RabbitMQ } from "@spacebar/util"; import { NextFunction, Request, Response, Router } from "express"; import { API_PREFIX_TRAILING_SLASH } from "./Authentication"; @@ -154,9 +154,21 @@ export async function initRateLimits(app: Router) { const { routes, global, ip, error, enabled } = Config.get().limits.rate; if (!enabled) return; console.log("Enabling rate limits..."); - await listenEvent(EventRateLimit, (event) => { - Cache.set(event.channel_id as string, event.data); - event.acknowledge?.(); + + // Set up rate limit event listener + const setupRateLimitListener = async () => { + await listenEvent(EventRateLimit, (event) => { + Cache.set(event.channel_id as string, event.data); + event.acknowledge?.(); + }); + }; + + await setupRateLimitListener(); + + // Re-establish listener on RabbitMQ reconnection + RabbitMQ.on("reconnected", async () => { + console.log("[RateLimit] RabbitMQ reconnected, re-establishing rate limit listener"); + await setupRateLimitListener(); }); // await RateLimit.delete({ expires_at: LessThan(new Date().toISOString()) }); // cleans up if not already deleted, morethan -> older date // const limits = await RateLimit.find({ blocked: true }); diff --git a/src/gateway/listener/listener.ts b/src/gateway/listener/listener.ts index 1f0d386d2..41cf35e93 100644 --- a/src/gateway/listener/listener.ts +++ b/src/gateway/listener/listener.ts @@ -94,39 +94,79 @@ export async function setupListener(this: WebSocket) { console.error(`[RabbitMQ] [user-${this.user_id}] Channel Error (Handled):`, err); }; - if (RabbitMQ.connection) { - console.log("[RabbitMQ] setupListener: opts.channel = ", typeof opts.channel, "with channel id", opts.channel?.ch); - opts.channel = await RabbitMQ.connection.createChannel(); + // Function to set up all event listeners (used for initial setup and reconnection) + const setupEventListeners = async () => { + if (RabbitMQ.connection) { + console.log(`[RabbitMQ] [user-${this.user_id}] Setting up channel and event listeners`); + opts.channel = await RabbitMQ.connection.createChannel(); - opts.channel.on("error", handleChannelError); - opts.channel.queues = {}; - console.log("[RabbitMQ] channel created: ", typeof opts.channel, "with channel id", opts.channel?.ch); - } + opts.channel.on("error", handleChannelError); + opts.channel.queues = {}; + console.log("[RabbitMQ] channel created: ", typeof opts.channel, "with channel id", opts.channel?.ch); + } - this.events[this.user_id] = await listenEvent(this.user_id, consumer, opts); + this.events[this.user_id] = await listenEvent(this.user_id, consumer, opts); - relationships.forEach(async (relationship) => { - this.events[relationship.to_id] = await listenEvent(relationship.to_id, handlePresenceUpdate.bind(this), opts); - }); + for (const relationship of relationships) { + this.events[relationship.to_id] = await listenEvent(relationship.to_id, handlePresenceUpdate.bind(this), opts); + } - dm_channels.forEach(async (channel) => { - this.events[channel.id] = await listenEvent(channel.id, consumer, opts); - }); + for (const channel of dm_channels) { + this.events[channel.id] = await listenEvent(channel.id, consumer, opts); + } - guilds.forEach(async (guild) => { - const permission = await getPermission(this.user_id, guild.id); - this.permissions[guild.id] = permission; - this.events[guild.id] = await listenEvent(guild.id, consumer, opts); + for (const guild 
of guilds) { + const permission = await getPermission(this.user_id, guild.id); + this.permissions[guild.id] = permission; + this.events[guild.id] = await listenEvent(guild.id, consumer, opts); - guild.channels.forEach(async (channel) => { - if (permission.overwriteChannel(channel.permission_overwrites ?? []).has("VIEW_CHANNEL")) { - this.events[channel.id] = await listenEvent(channel.id, consumer, opts); + for (const channel of guild.channels) { + if (permission.overwriteChannel(channel.permission_overwrites ?? []).has("VIEW_CHANNEL")) { + this.events[channel.id] = await listenEvent(channel.id, consumer, opts); + } } - }); - }); + } + }; + + // Initial setup + await setupEventListeners(); + + // Handle RabbitMQ reconnection - re-establish all subscriptions + const handleReconnect = async () => { + console.log(`[RabbitMQ] [user-${this.user_id}] Connection restored, re-establishing subscriptions`); + try { + // Clear old event handlers (they're now invalid) + this.events = {}; + this.member_events = {}; + opts.channel = undefined; + + // re-establish all subscriptions + await setupEventListeners(); + console.log(`[RabbitMQ] [user-${this.user_id}] Successfully re-established subscriptions`); + } catch (e) { + console.error(`[RabbitMQ] [user-${this.user_id}] Failed to re-establish subscriptions:`, e); + // close the WebSocket - will force client to reconnect and redo subscription setup + this.close(4000, "Failed to re-establish event subscriptions"); + } + }; + + const handleDisconnect = () => { + console.log(`[RabbitMQ] [user-${this.user_id}] Connection lost, waiting for reconnection`); + // mark channel invalid + if (opts.channel) { + opts.channel.off("error", handleChannelError); + } + opts.channel = undefined; + }; + + // Subscribe to RabbitMQ connection events + RabbitMQ.on("reconnected", handleReconnect); + RabbitMQ.on("disconnected", handleDisconnect); this.once("close", async () => { - // console.log("[Events] setupListener: close for", this.user_id, "=", typeof opts.channel, "with channel id", opts.channel?.ch); + // Unsubscribe from RabbitMQ events + RabbitMQ.off("reconnected", handleReconnect); + RabbitMQ.off("disconnected", handleDisconnect); // wait for event consumer cancellation await Promise.all( @@ -138,7 +178,11 @@ export async function setupListener(this: WebSocket) { await Promise.all(Object.values(this.member_events).map((x) => x())); if (opts.channel) { - await opts.channel.close(); + try { + await opts.channel.close(); + } catch { + // Channel might already be closed + } opts.channel.off("error", handleChannelError); } }); diff --git a/src/util/util/Event.ts b/src/util/util/Event.ts index e2be9b196..cce72e7eb 100644 --- a/src/util/util/Event.ts +++ b/src/util/util/Event.ts @@ -36,19 +36,33 @@ export async function emitEvent(payload: Omit) { if (RabbitMQ.connection) { const data = typeof payload.data === "object" ? JSON.stringify(payload.data) : payload.data; // use rabbitmq for event transmission - const channel = await RabbitMQ.getSafeChannel(); - try { - await channel.assertExchange(id, "fanout", { - durable: false, - }); - // assertQueue isn't needed, because a queue will automatically created if it doesn't exist - const successful = channel.publish(id, "", Buffer.from(`${data}`), { type: payload.event }); - if (!successful) throw new Error("failed to send event"); - } catch (e) { - // todo: should we retry publishng the event? 
- console.log("[RabbitMQ] ", e); - } + const publishEvent = async (retryCount = 0): Promise => { + const channel = await RabbitMQ.getSafeChannel(); + try { + await channel.assertExchange(id, "fanout", { + durable: false, + }); + + // assertQueue isn't needed, because a queue will automatically created if it doesn't exist + const successful = channel.publish(id, "", Buffer.from(`${data}`), { type: payload.event }); + if (!successful) throw new Error("failed to send event"); + } catch (e) { + // Check if this is a channel closed error and if we should retry + const errorMessage = e instanceof Error ? e.message : String(e); + const isChannelError = errorMessage.includes("Channel closed") || errorMessage.includes("IllegalOperationError") || errorMessage.includes("RESOURCE_ERROR"); + + if (isChannelError && retryCount < 1) { + console.log("[RabbitMQ] Channel error detected, retrying with new channel..."); + // Force the cached channel to be discarded by calling getSafeChannel which will create a new one + return publishEvent(retryCount + 1); + } + + console.log("[RabbitMQ] ", e); + } + }; + + await publishEvent(); } else if (process.env.EVENT_TRANSMISSION === "unix" && process.env.EVENT_SOCKET_PATH) { if (!unixSocketWriter) { unixSocketWriter = new UnixSocketWriter(process.env.EVENT_SOCKET_PATH); @@ -64,19 +78,28 @@ export async function emitEvent(payload: Omit) { export async function initEvent() { await RabbitMQ.init(); // does nothing if rabbitmq is not setup - if (RabbitMQ.connection) { - // empty on purpose? - } else { - // use event emitter - // use process messages - } - await listenEvent("spacebar", async (event) => { - console.log("[Event] Received spacebar event:", event); - if ((event.event as string) === "SB_RELOAD_CONFIG") { - console.log("[Event] Reloading config due to RELOAD_CONFIG event"); - await Config.init(true); - } + // Set up the spacebar event listener (used for config reload, etc.) + const setupSpacebarListener = async () => { + if (!RabbitMQ.connection) return; + + console.log("[Event] Setting up spacebar event listener"); + await listenEvent("spacebar", async (event) => { + console.log("[Event] Received spacebar event:", event); + if ((event.event as string) === "SB_RELOAD_CONFIG") { + console.log("[Event] Reloading config due to RELOAD_CONFIG event"); + await Config.init(true); + } + }); + }; + + // Initial setup + await setupSpacebarListener(); + + // Re-establish listener on reconnection + RabbitMQ.on("reconnected", async () => { + console.log("[Event] RabbitMQ reconnected, re-establishing spacebar listener"); + await setupSpacebarListener(); }); } @@ -142,16 +165,29 @@ export async function listenEvent(event: string, callback: (event: EventOpts) => async function rabbitListen(channel: Channel, id: string, callback: (event: EventOpts) => unknown, opts?: { acknowledge?: boolean }): Promise<() => Promise> { await channel.assertExchange(id, "fanout", { durable: false }); + // messageTtl ensures any orphaned messages are cleaned up quickly if the consumer disconnects. const q = await channel.assertQueue("", { exclusive: true, autoDelete: true, + messageTtl: 5000, // Messages expire after 5 seconds if not consumed }); const consumerTag = randomUUID(); const cancel = async () => { - await channel.cancel(consumerTag); - await channel.unbindQueue(q.queue, id, ""); + try { + // Order matters here to prevent RESOURCE_ERROR: + // 1. Unbind first - stops new messages from being routed to this queue + await channel.unbindQueue(q.queue, id, ""); + // 2. 
Cancel consumer - with autoDelete: true, this triggers queue deletion + // after RabbitMQ ensures no messages are in-flight to this queue + await channel.cancel(consumerTag); + // Don't explicitly delete the queue - let autoDelete handle it safely. + // Explicitly deleting can race with in-flight message delivery. + } catch (e) { + // Channel might already be closed or queue already deleted - that's fine + console.log("[RabbitMQ] Error during consumer cancel (may be expected):", e instanceof Error ? e.message : e); + } }; await channel.bindQueue(q.queue, id, ""); diff --git a/src/util/util/RabbitMQ.ts b/src/util/util/RabbitMQ.ts index 777b77ee3..1b09e29db 100644 --- a/src/util/util/RabbitMQ.ts +++ b/src/util/util/RabbitMQ.ts @@ -18,75 +18,164 @@ import amqp, { Channel, ChannelModel } from "amqplib"; import { Config } from "./Config"; +import EventEmitter from "events"; export class RabbitMQ { public static connection: ChannelModel | null = null; public static channel: Channel | null = null; + // Event emitter for connection state changes + private static events = new EventEmitter(); + + // Reconnection state + private static isReconnecting = false; + private static reconnectAttempts = 0; + private static readonly MAX_RECONNECT_DELAY_MS = 30000; // Max 30 seconds between retries + private static readonly BASE_RECONNECT_DELAY_MS = 1000; // Start with 1 second + + // Track if event listeners have been set up (to avoid duplicates) + private static connectionListenersAttached = false; + + /** + * Subscribe to connection events. + * - 'reconnected': Fired after successful reconnection. Consumers should re-establish subscriptions. + * - 'disconnected': Fired when connection is lost. + */ + static on(event: "reconnected" | "disconnected", listener: () => void) { + this.events.on(event, listener); + } + + static off(event: "reconnected" | "disconnected", listener: () => void) { + this.events.off(event, listener); + } + static async init() { const host = Config.get().rabbitmq.host; if (!host) return; - console.log(`[RabbitMQ] connect: ${host}`); - this.connection = await amqp.connect(host, { - timeout: 1000 * 60, - }); - console.log(`[RabbitMQ] connected`); - // log connection errors + await this.connect(host); + } + + private static async connect(host: string): Promise { + try { + console.log(`[RabbitMQ] Connecting to: ${host}`); + this.connection = await amqp.connect(host, { + timeout: 1000 * 60, + }); + console.log(`[RabbitMQ] Connected successfully`); + + // Reset reconnection state on successful connect + this.reconnectAttempts = 0; + this.isReconnecting = false; + + // Only attach listeners once per connection object + if (!this.connectionListenersAttached) { + this.attachConnectionListeners(host); + this.connectionListenersAttached = true; + } + + // Pre-create the shared channel + await this.getSafeChannel(); + + // Notify subscribers that connection is (re-)established + this.events.emit("reconnected"); + } catch (error) { + console.error("[RabbitMQ] Connection failed:", error); + await this.scheduleReconnect(host); + throw error; + } + } + + private static attachConnectionListeners(host: string) { + if (!this.connection) return; + this.connection.on("error", (err) => { - console.error("[RabbitMQ] Connection Error:", err); + console.error("[RabbitMQ] Connection error:", err); + // Don't reconnect here - wait for 'close' event }); this.connection.on("close", () => { - console.error("[RabbitMQ] connection closed"); - // TODO: Add reconnection logic here if the connection crashes?? 
- // will be a pain since we will have to reconstruct entire state + console.error("[RabbitMQ] Connection closed"); + this.channel = null; + this.connection = null; + this.connectionListenersAttached = false; + + // Notify subscribers that connection is lost + this.events.emit("disconnected"); + + // Schedule reconnection + this.scheduleReconnect(host); }); + } + + private static async scheduleReconnect(host: string): Promise { + if (this.isReconnecting) { + console.log("[RabbitMQ] Reconnection already in progress, skipping"); + return; + } + + this.isReconnecting = true; + this.reconnectAttempts++; + + // Exponential backoff with jitter + const baseDelay = Math.min(this.BASE_RECONNECT_DELAY_MS * Math.pow(2, this.reconnectAttempts - 1), this.MAX_RECONNECT_DELAY_MS); + // Add jitter (±25%) to prevent thundering herd + const jitter = baseDelay * 0.25 * (Math.random() * 2 - 1); + const delay = Math.round(baseDelay + jitter); - await this.getSafeChannel(); // why is this here? + console.log(`[RabbitMQ] Scheduling reconnection attempt ${this.reconnectAttempts} in ${delay}ms`); + + await new Promise((resolve) => setTimeout(resolve, delay)); + + try { + await this.connect(host); + } catch { + // connect() will schedule another reconnect on failure + console.log("[RabbitMQ] Reconnection attempt failed, will retry"); + } } static async getSafeChannel(): Promise { - if (!this.connection) return Promise.reject(); - if (this.channel) return this.channel; + if (!this.connection) { + return Promise.reject(new Error("[RabbitMQ] No connection available")); + } + + // Check if cached channel is still usable + if (this.channel) { + // amqplib channels have a 'closed' property when closed + const isClosed = (this.channel as unknown as { closed?: boolean }).closed; + if (!isClosed) { + return this.channel; + } + console.log("[RabbitMQ] Cached channel is closed, creating new one"); + this.channel = null; + } try { this.channel = await this.connection.createChannel(); - console.log(`[RabbitMQ] channel created`); + console.log("[RabbitMQ] Channel created"); - // log channel errors this.channel.on("error", (err) => { - console.error("[RabbitMQ] Channel Error:", err); + console.error("[RabbitMQ] Channel error:", err); }); this.channel.on("close", () => { - console.log("[RabbitMQ] channel closed"); - this.channel = null; - }); - - this.connection.on("error", (err) => { - console.error("[RabbitMQ] connection error, setting channel to null and reconnecting:", err); + console.log("[RabbitMQ] Channel closed"); this.channel = null; - this.connection = null; - this.init(); - }); - - this.connection.on("close", () => { - console.log("[RabbitMQ] connection closed, setting channel to null and reconnecting"); - this.channel = null; - this.connection = null; - this.init(); }); return this.channel; } catch (e) { console.error("[RabbitMQ] Failed to create channel:", e); - console.error("[RabbitMQ] Forcing reconnect!"); - this.connection = null; this.channel = null; - await this.init(); - return await this.getSafeChannel(); - // return Promise.reject(e); + throw e; } } + + /** + * Check if RabbitMQ is currently connected and ready. 
+ */ + static isConnected(): boolean { + return this.connection !== null && !this.isReconnecting; + } } From 9368066ec5fecd8e6da3eec713c2043a290d843a Mon Sep 17 00:00:00 2001 From: Rory& Date: Fri, 23 Jan 2026 09:18:54 +0100 Subject: [PATCH 020/151] Port User.tag getter from pomelo as utility --- assets/openapi.json | 4 ++++ assets/schemas.json | 4 ++++ src/api/routes/auth/forgot.ts | 2 +- src/api/routes/auth/verify/resend.ts | 2 +- src/util/entities/User.ts | 9 ++++++++- 5 files changed, 18 insertions(+), 3 deletions(-) diff --git a/assets/openapi.json b/assets/openapi.json index 298ad18ec..3c573188a 100644 --- a/assets/openapi.json +++ b/assets/openapi.json @@ -7627,6 +7627,9 @@ "type": "string" } }, + "tag": { + "type": "string" + }, "id": { "type": "string" } @@ -7658,6 +7661,7 @@ "security_keys", "sessions", "system", + "tag", "username", "verified", "webauthn_enabled" diff --git a/assets/schemas.json b/assets/schemas.json index a7d759fab..46a36939b 100644 --- a/assets/schemas.json +++ b/assets/schemas.json @@ -8097,6 +8097,9 @@ "type": "string" } }, + "tag": { + "type": "string" + }, "id": { "type": "string" } @@ -8129,6 +8132,7 @@ "security_keys", "sessions", "system", + "tag", "username", "verified", "webauthn_enabled" diff --git a/src/api/routes/auth/forgot.ts b/src/api/routes/auth/forgot.ts index 26e1b353b..4ebb96545 100644 --- a/src/api/routes/auth/forgot.ts +++ b/src/api/routes/auth/forgot.ts @@ -68,7 +68,7 @@ router.post( if (user && user.email) { Email.sendResetPassword(user, user.email).catch((e) => { - console.error(`Failed to send password reset email to ${user.username}#${user.discriminator} (${user.id}): ${e}`); + console.error(`Failed to send password reset email to ${user.tag} (${user.id}): ${e}`); }); } }, diff --git a/src/api/routes/auth/verify/resend.ts b/src/api/routes/auth/verify/resend.ts index 4c0339bf6..453c616d0 100644 --- a/src/api/routes/auth/verify/resend.ts +++ b/src/api/routes/auth/verify/resend.ts @@ -56,7 +56,7 @@ router.post( return res.sendStatus(204); }) .catch((e) => { - console.error(`Failed to send verification email to ${user.username}#${user.discriminator}: ${e}`); + console.error(`Failed to send verification email to ${user.tag}: ${e}`); throw new HTTPError("Failed to send verification email", 500); }); }, diff --git a/src/util/entities/User.ts b/src/util/entities/User.ts index 84471e0d4..d3c01c1a3 100644 --- a/src/util/entities/User.ts +++ b/src/util/entities/User.ts @@ -245,6 +245,13 @@ export class User extends BaseClass { } } + public get tag(): string { + //const { uniqueUsernames } = Config.get().general; + const uniqueUsernames = false; + + return uniqueUsernames ? 
this.username : `${this.username}#${this.discriminator}`; + } + static async register({ email, username, @@ -310,7 +317,7 @@ export class User extends BaseClass { // send verification email if users aren't verified by default and we have an email if (!Config.get().defaults.user.verified && email) { await Email.sendVerifyEmail(user, email).catch((e) => { - console.error(`Failed to send verification email to ${user.username}#${user.discriminator}: ${e}`); + console.error(`Failed to send verification email to ${user.tag}: ${e}`); }); } From 790282ebd8a18656f1da6569abc496f008d26092 Mon Sep 17 00:00:00 2001 From: Rory& Date: Fri, 23 Jan 2026 09:41:43 +0100 Subject: [PATCH 021/151] Dummy warp endpoint --- assets/openapi.json | 20 +++++++++++++++++ src/api/routes/warp/license.ts | 39 ++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 src/api/routes/warp/license.ts diff --git a/assets/openapi.json b/assets/openapi.json index 3c573188a..d641fe628 100644 --- a/assets/openapi.json +++ b/assets/openapi.json @@ -13042,6 +13042,9 @@ { "name": "voice" }, + { + "name": "warp" + }, { "name": "webhooks" } @@ -13502,6 +13505,23 @@ ] } }, + "/warp/license/": { + "post": { + "security": [ + { + "bearer": [] + } + ], + "responses": { + "204": { + "description": "No description available" + } + }, + "tags": [ + "warp" + ] + } + }, "/voice/regions/": { "get": { "security": [ diff --git a/src/api/routes/warp/license.ts b/src/api/routes/warp/license.ts new file mode 100644 index 000000000..7ac8eb5c8 --- /dev/null +++ b/src/api/routes/warp/license.ts @@ -0,0 +1,39 @@ +/* + Spacebar: A FOSS re-implementation and extension of the Discord.com backend. + Copyright (C) 2025 Spacebar and Spacebar Contributors + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +*/ + +import { route } from "@spacebar/api"; +import { Request, Response, Router } from "express"; + +const router = Router({ mergeParams: true }); + +// Don't care, maybe some day figure out the response schema, but we have no good way to respond to this +router.post( + "/", + route({ + responses: { + 204: {}, + }, + spacebarOnly: false, // Not part of public openapi + }), + (req: Request, res: Response) => { + // TODO: + res.sendStatus(204); + }, +); + +export default router; From 0ed83c0b6c10a2ee9b6fbf684e557922b8160381 Mon Sep 17 00:00:00 2001 From: Rory& Date: Fri, 23 Jan 2026 11:35:11 +0100 Subject: [PATCH 022/151] Identify: add self as recipient for DMs with nonexistnt users --- src/gateway/opcodes/Identify.ts | 46 +++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/src/gateway/opcodes/Identify.ts b/src/gateway/opcodes/Identify.ts index d6f146270..34732c381 100644 --- a/src/gateway/opcodes/Identify.ts +++ b/src/gateway/opcodes/Identify.ts @@ -16,11 +16,19 @@ along with this program. If not, see . 
*/ -import { CLOSECODES, Capabilities, OPCODES, Payload, Send, WebSocket, setupListener } from "@spacebar/gateway"; +import { Capabilities, CLOSECODES, OPCODES, Payload, Send, setupListener, WebSocket } from "@spacebar/gateway"; import { Application, + Channel, + checkToken, Config, + CurrentTokenFormatVersion, + ElapsedTime, + emitEvent, + Emoji, EVENTEnum, + generateToken, + getDatabase, Guild, GuildOrUnavailable, Intents, @@ -33,32 +41,24 @@ import { ReadyGuildDTO, ReadyUserGuildSettingsEntries, Recipient, + Relationship, + Role, Session, SessionsReplace, - UserSettings, - checkToken, - emitEvent, - getDatabase, - TraceNode, - TraceRoot, + Sticker, Stopwatch, + timeFunction, timePromise, - ElapsedTime, - Channel, - Emoji, - Role, - Sticker, - VoiceState, + TraceNode, + TraceRoot, + UserSettings, UserSettingsProtos, - generateToken, - CurrentTokenFormatVersion, - Relationship, - timeFunction, + VoiceState, } from "@spacebar/util"; import { check } from "./instanceOf"; import { In, Not } from "typeorm"; import { PreloadedUserSettings } from "discord-protos"; -import { DefaultUserGuildSettings, DMChannel, IdentifySchema, PrivateUserProjection, PublicUser, PublicUserProjection } from "@spacebar/schemas"; +import { ChannelType, DefaultUserGuildSettings, DMChannel, IdentifySchema, PrivateUserProjection, PublicUser, PublicUserProjection } from "@spacebar/schemas"; // TODO: user sharding // TODO: check privileged intents, if defined in the config @@ -479,9 +479,17 @@ export async function onIdentify(this: WebSocket, data: Payload) { // Remove ourself from the list of other users in dm channel channel.recipients = channel.recipients.filter((recipient) => recipient.user.id !== this.user_id); - const channelUsers = channel.recipients?.map((recipient) => recipient.user.toPublicUser()); + let channelUsers = channel.recipients?.map((recipient) => recipient.user.toPublicUser()); if (channelUsers && channelUsers.length > 0) channelUsers.forEach((user) => users.add(user)); + // HACK: insert self into recipients for DMs with users that no longer exist + else if (channel.type === ChannelType.DM) { + const selfUser = user.toPublicUser(); + users.add(selfUser); + channelUsers ??= []; + channelUsers.push(selfUser); + } + return { id: channel.id, flags: channel.flags, From e52f4c3cd06ef9045870ae24340912f44e9d141a Mon Sep 17 00:00:00 2001 From: Rory& Date: Fri, 23 Jan 2026 12:00:34 +0100 Subject: [PATCH 023/151] cdn: actually execute controllers --- src/cdn/util/cache.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/cdn/util/cache.ts b/src/cdn/util/cache.ts index 3c3ea8fb2..a36f2c4c0 100644 --- a/src/cdn/util/cache.ts +++ b/src/cdn/util/cache.ts @@ -21,4 +21,5 @@ import { NextFunction, Response, Request } from "express"; export function cache(req: Request, res: Response, next: NextFunction) { const cacheDuration = 21600; // 6 hours res.setHeader("Cache-Control", `public, max-age=${cacheDuration}, s-maxage=${cacheDuration}, immutable`); + next(); } From f3f4a557f333c76e1cbda22b0808299211528f3a Mon Sep 17 00:00:00 2001 From: Rory& Date: Fri, 23 Jan 2026 17:06:03 +0100 Subject: [PATCH 024/151] Forbid dashes in emoji names --- src/api/routes/guilds/#guild_id/emojis.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/api/routes/guilds/#guild_id/emojis.ts b/src/api/routes/guilds/#guild_id/emojis.ts index 5bc03433c..663ce417b 100644 --- a/src/api/routes/guilds/#guild_id/emojis.ts +++ b/src/api/routes/guilds/#guild_id/emojis.ts @@ -107,6 +107,7 @@ router.post( if (emoji_count >= maxEmojis) throw 
DiscordApiErrors.MAXIMUM_NUMBER_OF_EMOJIS_REACHED.withParams(maxEmojis); if (body.require_colons == null) body.require_colons = true; + if (body.name?.includes("-")) body.name = body.name?.replaceAll("-", ""); // Dashes are invalid apparently const user = req.user; await handleFile(`/emojis/${id}`, body.image); @@ -155,6 +156,8 @@ router.patch( const { emoji_id, guild_id } = req.params; const body = req.body as EmojiModifySchema; + if (body.name?.includes("-")) body.name = body.name?.replaceAll("-", ""); // Dashes are invalid apparently + const emoji = await Emoji.create({ ...body, id: emoji_id, From 4dd959ad549fe9855ef338b1dbdedbb498bb9727 Mon Sep 17 00:00:00 2001 From: Rory& Date: Sat, 24 Jan 2026 19:27:39 +0100 Subject: [PATCH 025/151] Make bot invites configurable --- src/api/routes/guilds/#guild_id/members/#member_id/index.ts | 4 ++-- src/api/routes/invites/index.ts | 4 ++-- src/util/config/types/UsersConfiguration.ts | 1 + 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/api/routes/guilds/#guild_id/members/#member_id/index.ts b/src/api/routes/guilds/#guild_id/members/#member_id/index.ts index 6a9cfc559..7049857e7 100644 --- a/src/api/routes/guilds/#guild_id/members/#member_id/index.ts +++ b/src/api/routes/guilds/#guild_id/members/#member_id/index.ts @@ -17,7 +17,7 @@ */ import { route } from "@spacebar/api"; -import { DiscordApiErrors, emitEvent, Emoji, getPermission, getRights, Guild, GuildMemberUpdateEvent, handleFile, Member, Role, Sticker } from "@spacebar/util"; +import { Config, DiscordApiErrors, emitEvent, Emoji, getPermission, getRights, Guild, GuildMemberUpdateEvent, handleFile, Member, Role, Sticker } from "@spacebar/util"; import { Request, Response, Router } from "express"; import { MemberChangeSchema, PublicMemberProjection, PublicUserProjection } from "@spacebar/schemas"; @@ -173,7 +173,7 @@ router.put( if (member_id === "@me") { member_id = req.user_id; rights.hasThrow("JOIN_GUILDS"); - if (req.user_bot) throw DiscordApiErrors.BOT_PROHIBITED_ENDPOINT; + if (req.user_bot && !Config.get().user.botsCanUseInvites) throw DiscordApiErrors.BOT_PROHIBITED_ENDPOINT; } else { // TODO: check oauth2 scope diff --git a/src/api/routes/invites/index.ts b/src/api/routes/invites/index.ts index 7e43aa567..630fe03c3 100644 --- a/src/api/routes/invites/index.ts +++ b/src/api/routes/invites/index.ts @@ -17,7 +17,7 @@ */ import { route } from "@spacebar/api"; -import { Ban, DiscordApiErrors, emitEvent, getPermission, Guild, Invite, InviteDeleteEvent, PublicInviteRelation, User } from "@spacebar/util"; +import { Ban, Config, DiscordApiErrors, emitEvent, getPermission, Guild, Invite, InviteDeleteEvent, PublicInviteRelation, User } from "@spacebar/util"; import { Request, Response, Router } from "express"; import { HTTPError } from "lambert-server"; import { UserFlags } from "@spacebar/schemas"; @@ -68,7 +68,7 @@ router.post( }, }), async (req: Request, res: Response) => { - if (req.user_bot) throw DiscordApiErrors.BOT_PROHIBITED_ENDPOINT; + if (req.user_bot && !Config.get().user.botsCanUseInvites) throw DiscordApiErrors.BOT_PROHIBITED_ENDPOINT; const { invite_code } = req.params; const { public_flags } = req.user; diff --git a/src/util/config/types/UsersConfiguration.ts b/src/util/config/types/UsersConfiguration.ts index 2ac058e92..d51a3b851 100755 --- a/src/util/config/types/UsersConfiguration.ts +++ b/src/util/config/types/UsersConfiguration.ts @@ -19,4 +19,5 @@ export class UserConfiguration { blockedContains: string[] = ["discord", "clyde", "spacebar", "steam", 
"community", "support", "ticket"]; blockedEquals: string[] = ["everyone", "here"]; + botsCanUseInvites = false; } From 56d8aa2326382633be81a014530b11f14aaa2a62 Mon Sep 17 00:00:00 2001 From: Rory& Date: Mon, 26 Jan 2026 10:20:52 +0100 Subject: [PATCH 026/151] C# dependency updates, CDN-CS work --- .idea/workspace.xml | 20 +- .../.idea/indexLayout.xml | 4 +- .../.idea/sqldialects.xml | 6 + .../FilesystemFileSource.cs | 41 +++ .../IFileSource.cs | 24 +- .../LruFileCache.cs | 35 ++ .../ProxyFileSource.cs | 9 +- .../Spacebar.Interop.Cdn.Abstractions.csproj | 12 + .../deps.json | 1 + ...acebar.Interop.Replication.RabbitMq.csproj | 2 +- .../Spacebar.Models.Db.csproj | 4 +- .../Models/Spacebar.Models.Db/deps.json | 185 ++-------- .../Spacebar.AdminApi.csproj | 19 +- .../Controllers/GetImageController.cs | 4 +- .../Internal/GetImageController.cs | 5 +- .../Extensions/FileSourceExtensions.cs | 25 ++ extra/admin-api/Spacebar.Cdn/Program.cs | 11 +- .../Properties/launchSettings.json | 10 + .../Services/CdnStorageRebuildService.cs | 5 + .../Spacebar.Cdn/Services/LruFileCache.cs | 58 --- .../Spacebar.Cdn/Spacebar.Cdn.csproj | 5 +- .../Spacebar.Cdn/StreamingHttpClient.cs | 304 ---------------- extra/admin-api/SpacebarAdminAPI.sln | 30 ++ .../DiscordEmojiConverter.csproj | 2 +- .../Spacebar.AdminApi.PrepareTestData.csproj | 2 +- .../Pages/Guilds.razor | 1 + .../Pages/HttpTestClient.razor | 1 + .../Pages/NonAdmin/StickerManager.razor | 1 + .../Pages/ServerConfig.razor | 1 + .../Pages/Users.razor | 1 + .../Pages/UsersDelete.razor | 1 + .../Services/StreamingHttpClient.cs | 304 ---------------- .../Spacebar.AdminApi.TestClient.csproj | 4 +- .../Spacebar.AdminApiTest.csproj | 2 +- .../Spacebar.Cdn.Fsck/FsckService.cs | 117 ++++++ .../Utilities/Spacebar.Cdn.Fsck/Program.cs | 21 ++ .../Properties/launchSettings.json | 19 + .../Spacebar.Cdn.Fsck.csproj | 19 + .../appsettings.Development.json | 8 + .../Spacebar.Cdn.Fsck/appsettings.json | 8 + .../Utilities/Spacebar.Cdn.Fsck/deps.json | 1 + .../Spacebar.CleanSettingsRows/deps.json | 196 ++--------- extra/admin-api/nuget.config | 4 +- extra/admin-api/outputs.nix | 12 + nix/modules/default/cdn-cs.nix | 333 ++++++++++++++++++ nix/modules/default/default.nix | 57 +-- 46 files changed, 861 insertions(+), 1073 deletions(-) create mode 100644 extra/admin-api/.idea/.idea.SpacebarAdminAPI/.idea/sqldialects.xml create mode 100644 extra/admin-api/Interop/Spacebar.Interop.Cdn.Abstractions/FilesystemFileSource.cs rename extra/admin-api/{Spacebar.Cdn/Services => Interop/Spacebar.Interop.Cdn.Abstractions}/IFileSource.cs (51%) create mode 100644 extra/admin-api/Interop/Spacebar.Interop.Cdn.Abstractions/LruFileCache.cs rename extra/admin-api/{Spacebar.Cdn/Services => Interop/Spacebar.Interop.Cdn.Abstractions}/ProxyFileSource.cs (76%) create mode 100644 extra/admin-api/Interop/Spacebar.Interop.Cdn.Abstractions/Spacebar.Interop.Cdn.Abstractions.csproj create mode 100644 extra/admin-api/Interop/Spacebar.Interop.Cdn.Abstractions/deps.json create mode 100644 extra/admin-api/Spacebar.Cdn/Extensions/FileSourceExtensions.cs create mode 100644 extra/admin-api/Spacebar.Cdn/Services/CdnStorageRebuildService.cs delete mode 100644 extra/admin-api/Spacebar.Cdn/Services/LruFileCache.cs delete mode 100644 extra/admin-api/Spacebar.Cdn/StreamingHttpClient.cs delete mode 100644 extra/admin-api/Utilities/Spacebar.AdminApi.TestClient/Services/StreamingHttpClient.cs create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/FsckService.cs create mode 100644 
extra/admin-api/Utilities/Spacebar.Cdn.Fsck/Program.cs
 create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/Properties/launchSettings.json
 create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/Spacebar.Cdn.Fsck.csproj
 create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/appsettings.Development.json
 create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/appsettings.json
 create mode 100644 extra/admin-api/Utilities/Spacebar.Cdn.Fsck/deps.json
 create mode 100644 nix/modules/default/cdn-cs.nix

diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index 741d94a61..b9d89f055 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
@@ -12,6 +12,11 @@