chore: merge branch dev
to main
#195
1 changed file with 26 additions and 11 deletions
|
@ -1,4 +1,4 @@
|
|||
import { Client, EmbedBuilder, Message, TextBasedChannel } from "discord.js";
|
||||
import { APIEmbedField, Client, EmbedBuilder, Message, TextBasedChannel } from "discord.js";
|
||||
import { getLocale } from "../../utils/locales";
|
||||
import { isImage, userWithNickname } from "../../utils/misc";
|
||||
import { showDate } from "../../utils/time";
|
||||
|
@ -114,18 +114,33 @@ export default async (message: Message, client: Client) => {
|
|||
embed.setImage(quoted_post.attachments.first()!.url);
|
||||
} else {
|
||||
// Contains more than one image and/or other files
|
||||
let files = "";
|
||||
quoted_post.attachments.forEach((file) => (files += `[${file.name}](${file.url}), `));
|
||||
embed.addFields({
|
||||
name:
|
||||
quoted_post.attachments.size > 1
|
||||
? loc.get("e_attachements")
|
||||
: loc.get("e_attachement"),
|
||||
|
||||
// TODO: Check if don't exceed char limit, if yes, split
|
||||
// files into multiples field.
|
||||
value: `${files.slice(0, -2)}.`,
|
||||
// We are currently losing a link to a file if the link is too long,
|
||||
// but we can't do much about it
|
||||
const maxFieldValueLength = 1024;
|
||||
const files = quoted_post.attachments
|
||||
.map((file) => `[${file.name}](${file.url}`)
|
||||
.filter((link) => link.length <= maxFieldValueLength);
|
||||
|
||||
let currentField = "";
|
||||
const fields: APIEmbedField[] = [];
|
||||
files.forEach((file, idx) => {
|
||||
const potentialField = `${currentField}, ${file}`;
|
||||
|
||||
if (potentialField.length > maxFieldValueLength || idx === files.length - 1) {
|
||||
fields.push({
|
||||
name: loc.get(
|
||||
quoted_post.attachments.size > 1 ? "e_attachements" : "e_attachement",
|
||||
),
|
||||
value: currentField,
|
||||
});
|
||||
currentField = file;
|
||||
} else {
|
||||
currentField = potentialField;
|
||||
}
|
||||
});
|
||||
|
||||
embed.addFields(fields);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue