Compare commits

...

17 Commits
dev ... main

Author SHA1 Message Date
Skylar Grant 2a3b53ea08 Fix kms emoji is 2024-06-21 15:14:38 -04:00
Skylar Grant 99af5ca8b4 Add f u nodbot response 2024-06-21 15:11:34 -04:00
Skylar Grant 7003351ecc Updating how autoresponses are handled to be cleaner 2024-06-21 15:02:22 -04:00
Skylar Grant bfa6e10011 Versioning v3.2.3 2024-06-21 15:02:08 -04:00
Skylar Grant 22f2ac58df I hate you 2024-06-21 15:01:56 -04:00
Skylar Grant 3dcc4e021f Merge pull request 'v3.2.2-dev Merged new fixed sbs and OpenAI deintegration' (#12) from v3.2.2-dev into main
Reviewed-on: #12
2024-06-15 02:24:16 +00:00
Skylar Grant d1e2152de9 Disabled OpenAI init
NodBot Production Dockerization / build (pull_request) Has been cancelled
2024-06-14 22:06:54 -04:00
Skylar Grant 1a024b216a Merge branch 'dev' into v3.2.2-dev 2024-06-14 22:00:33 -04:00
Skylar Grant ae9ce308b2 Ignore Mac BS
NodBot Production Dockerization / build (pull_request) Has been cancelled
2024-06-14 14:43:23 -04:00
Skylar Grant 2ea94092db Ignore Mac BS 2024-06-14 14:43:06 -04:00
Skylar Grant 3f5cc0a9ff Fix exec dir
NodBot Production Dockerization / build (pull_request) Has been cancelled
2024-01-01 15:47:14 -05:00
Skylar Grant 545756b762 Fix log overwriting error
NodBot Production Dockerization / build (pull_request) Has been cancelled
2024-01-01 15:44:55 -05:00
Skylar Grant d61ffbaaf7 Remove chat and dalle commands
NodBot Production Dockerization / build (pull_request) Has been cancelled
2024-01-01 15:42:17 -05:00
Skylar Grant 15d39e3381 commmenting
NodBot Production Dockerization / build (pull_request) Has been cancelled
2023-12-14 10:54:05 -05:00
Skylar Grant 97b4136b64 Fix joint randomization and add more aliases
NodBot Production Dockerization / build (pull_request) Has been cancelled
2023-12-14 10:43:40 -05:00
Skylar Grant 82f65a800c bugfixes
NodBot Production Dockerization / build (pull_request) Has been cancelled
2023-12-13 16:49:48 -05:00
Skylar Grant f995d9a643 Upgrade to gpt-3.5-turbo
NodBot Production Dockerization / build (pull_request) Failing after 10m32s
2023-10-18 17:33:03 -04:00
13 changed files with 176 additions and 116 deletions

BIN
.DS_Store vendored

Binary file not shown.

1
.gitignore vendored
View File

@@ -114,3 +114,4 @@ dist
# TernJS port file
.tern-port
.DS_Store

View File

@@ -5,4 +5,5 @@ WORKDIR /usr/src/app
COPY package.json ./
RUN npm install
COPY . .
CMD ["/bin/sh", "-c", "node main.js 2> /logs/nodbot.error 1> /logs/nodbot.log"]
# CMD ["/bin/sh", "-c", "node main.js 2> /logs/nodbot.error 1> /logs/nodbot.log"]
CMD ["/bin/sh", "-c", "node main.js 2> /logs/$(date +%Y-%m-%d_%H-%M-%S)-error.txt 1> /logs/$(date +%Y-%m-%d_%H-%M-%S)-status.txt"]

View File

@@ -1,4 +1,5 @@
{
"guildId": "868542949737246730",
"validCommands": []
"validCommands": [],
"roaches": []
}

View File

@@ -5,13 +5,33 @@ module.exports = {
name: 'joint',
description: 'Send a random weed-themed phrase.',
usage: '.joint',
alias: ['bong', 'blunt', 'bowl', 'pipe'],
alias: ['bong', 'blunt', 'bowl', 'pipe', 'dab', 'vape', 'dabs', 'shatter', 'edible', 'edibles', 'doobie', 'spliff', 'gummy', 'gummies', 'hash', 'toke', 'big doinks'],
execute(message, commandData) {
let joints = [];
// Create a simple array of the joint texts
for (const entry of message.client.joints.map(joint => joint.content)) {
joints.push(entry);
}
const randIndex = Math.floor(Math.random() * joints.length);
// Generate a random number between 0 and the length of the joints array
let randIndex = Math.floor(Math.random() * joints.length);
// Grab the joint text from the array
let joint = joints[randIndex];
// Check if the joint has already been smoked
while (message.client.roaches.has(joint)) {
// Regenerate a random number and recheck
randIndex = Math.floor(Math.random() * joints.length);
joint = joints[randIndex];
}
// Send the joint
message.reply(`${joints[randIndex]} ${emoji.joint}`);
// Check how full the roach collection is
if (message.client.roaches.size / joints.length >= 0.85) {
// If the roach collection is 85% of the joints collection
// Empty it out
message.client.roaches.clear();
}
// Add the joint to the roach collection
message.client.roaches.set(joint, "baked");
}
}
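
The change above turns the .joint command into a no-repeat random picker: it re-rolls while a phrase is already in the roaches collection and resets that collection once roughly 85% of the pool has been served. A minimal standalone sketch of the same technique, with a plain Map standing in for the Discord.js Collection (sample phrases are illustrative):

```js
// Minimal sketch of the no-repeat picker added above; a plain Map stands in for the
// Discord.js Collection, and the sample phrases are illustrative.
const roaches = new Map();

function pickJoint(joints) {
  let joint = joints[Math.floor(Math.random() * joints.length)];
  // Re-roll while this phrase has already been "smoked".
  while (roaches.has(joint)) {
    joint = joints[Math.floor(Math.random() * joints.length)];
  }
  // Once the roach collection reaches 85% of the pool, reset it before recording the pick.
  // Note: as in the diff, the reset only fires at >= 85%, so a very small pool could be
  // exhausted before it ever clears.
  if (roaches.size / joints.length >= 0.85) roaches.clear();
  roaches.set(joint, 'baked');
  return joint;
}

console.log(pickJoint(['puff puff pass', 'sparking one up', 'roll another']));
```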

View File

@@ -8,7 +8,9 @@ module.exports = {
const client = message.client;
let pastaData;
if (!client.pastas.has(commandData.args)) {
commandData.content = 'Sorry I couldn\'t find that pasta.';
pastaData = {
content: "Sorry, I couldn't find that pasta."
};
} else {
pastaData = client.pastas.get(commandData.args);
}

View File

@@ -22,20 +22,8 @@ const Discord = require('discord.js');
const FuzzySearch = require('fuzzy-search');
// OpenAI
const { Configuration, OpenAIApi } = require("openai");
const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(configuration);
async function openAIStatus(o) {
const response = await o.listModels();
const models = response.data.data;
models.forEach(e => {
console.log(`Model ID: ${e.id}`);
});
};
openAIStatus(openai);
// const OpenAI = require("openai");
// const openai = new OpenAI();
// Various imports from other files
const config = require('./config.json');
@@ -161,7 +149,7 @@ const functions = {
if (isDev) console.log('Strains Collection Built');
},
medicalAdvice(rows, client) {
if (!client.medicalAdviceCol) client.medicalAdviceColl = new Discord.Collection();
if (!client.medicalAdviceColl) client.medicalAdviceColl = new Discord.Collection();
client.medicalAdviceColl.clear();
for (const row of rows) {
const medicalAdvice = {
@@ -172,6 +160,11 @@ const functions = {
}
if (isDev) console.log('Medical Advice Collection Built');
},
roaches(client) {
if (!client.roaches) client.roaches = new Discord.Collection();
client.roaches.clear();
if (isDev) console.log('Roaches Collection Built');
}
},
dot: {
getCommandData(message) {
@@ -274,7 +267,7 @@ const functions = {
return { embeds: [ new Discord.MessageEmbed()
.setAuthor({name: `${commandData.args}.${commandData.command}`})
.setDescription(pastaData.content)
.setThumbnail(pastaData.iconUrl)
.setThumbnail("https://assets.vfsh.dev/shednod.png")
.setTimestamp()
.setFooter({text: commandData.author})]};
},
@@ -379,9 +372,9 @@ const functions = {
},
gpt(prompt, response, usage) {
const gptEmbed = new Discord.MessageEmbed()
.setAuthor({ name: "NodBot powered by GPT-3", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
.setAuthor({ name: "NodBot powered by GPT-3.5", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
.setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
.setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢` })
.setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢. Generated using ${strings.ai.chatModel}` })
return { embeds: [gptEmbed] };
},
generatingResponse() {
@@ -407,12 +400,13 @@ const functions = {
functions.download.requests(client);
});
},
pasta(pastaData, client) {
async pasta(pastaData, client) {
const query = `INSERT INTO pastas (name, content) VALUES (${db.escape(pastaData.name)},${db.escape(pastaData.content)}) ON DUPLICATE KEY UPDATE content=${db.escape(pastaData.content)}`;
db.query(query, (err, rows, fields) => {
await db.query(query, (err, rows, fields) => {
if (err) throw err;
functions.download.pastas(client);
});
return;
},
joint(content, client) {
const query = `INSERT INTO joints (content) VALUES (${db.escape(content)})`;
@@ -570,16 +564,17 @@ const functions = {
openAI: {
chatPrompt(userPrompt) {
return new Promise(async (resolve, reject) => {
const response = await openai.createCompletion({
model: 'text-davinci-003',
prompt: userPrompt,
temperature: 0.7,
max_tokens: 250
const response = await openai.chat.completions.create({
messages: [{
role: 'user',
content: userPrompt
}],
model: strings.ai.chatModel
}).catch(e => {
reject(e);
return null;
});
resolve(response.data);
resolve(response);
});
},
imagePrompt(userPrompt, size, userId) {
@@ -654,6 +649,66 @@ const functions = {
return newText + ' <:spongebob:1053398825965985822>';
},
autoresponses: { // Specific responses for certain keywords in sent messages
checkForAll(messageContent) {
let responses = [];
if (this.bigDoinks(messageContent)) responses.push("bigDoinks");
if (this.ligma(messageContent)) responses.push("ligma");
if (this.ong(messageContent)) responses.push("ong");
if (this.fuckYou(messageContent)) responses.push("fuckYou");
return responses;
},
bigDoinks(messageContent) {
let count = 0;
const { keywords } = strings.autoresponses.bigDoinks;
keywords.forEach(e => {
if (messageContent.includes(e)) count++;
});
if (count === keywords.length) {
return true;
}
},
ligma(messageContent) {
let count = 0;
const { keywords } = strings.autoresponses.ligma;
keywords.forEach(e => {
if (messageContent.includes(e)) count++;
});
if (count > 0) {
return true;
}
},
ong(messageContent) {
let count = 0;
const { keywords } = strings.autoresponses.ong;
keywords.forEach(e => {
if (messageContent.includes(e)) count++;
});
if (count > 0) {
return true;
}
},
fuckYou(messageContent) {
let count = 0;
const { keywords } = strings.autoresponses.fuckYou;
keywords.forEach(e => {
if (messageContent.includes(e)) count++;
});
if (count === keywords.length) {
return true;
}
},
send(message, responseType) {
const { responses } = strings.autoresponses[responseType];
const randomIndex = Math.floor(Math.random() * responses.length);
const response = responses[randomIndex];
try {
message.reply(response);
} catch(e) {
console.log(new Error(e));
}
}
},
generateErrorId() {
const digitCount = 10;
const digits = [];
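
The autoresponse helpers added above all follow one pattern: count keyword hits against the lists in strings.json, then fire on either an any-match (ligma, ong) or an all-match (bigDoinks, fuckYou) basis. A condensed, data-driven sketch of that same pattern, with an inline config and a matchAll flag standing in for the real strings.json entries:

```js
// Condensed sketch of the keyword-hit pattern above; the inline config and the
// matchAll flag are illustrative stand-ins for the real strings.json entries.
const autoresponses = {
  bigDoinks: { keywords: ['big', 'doinks'], matchAll: true, responses: ['Gang.'] },
  ligma: { keywords: ['ligma'], matchAll: false, responses: ['ligma balls lmao gottem'] },
};

function checkForAll(messageContent) {
  return Object.entries(autoresponses)
    .filter(([, { keywords, matchAll }]) => {
      const hits = keywords.filter(k => messageContent.includes(k)).length;
      return matchAll ? hits === keywords.length : hits > 0;
    })
    .map(([name]) => name);
}

console.log(checkForAll('big doinks in amish')); // ['bigDoinks']
```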

13
main.js
View File

@@ -34,6 +34,7 @@ client.once('ready', () => {
fn.collections.slashCommands(client);
fn.collections.dotCommands(client);
fn.collections.setvalidCommands(client);
fn.collections.roaches(client);
fn.download.gifs(client);
fn.download.pastas(client);
fn.download.joints(client);
@@ -220,14 +221,12 @@ client.on('messageCreate', message => {
// Some basic checking to prevent running unnecessary code
if (message.author.bot) return;
// Wildcard Responses, will respond if any message contains the trigger word(s), excluding self-messages
// Automatic Responses, will respond if any message contains the keyword(s), excluding self-messages
const lowerContent = message.content.toLowerCase();
if (lowerContent.includes('big') && lowerContent.includes('doinks')) message.reply('gang.');
if (lowerContent.includes('ligma')) message.reply('ligma balls, goteem');
if (lowerContent.includes('frfr') || lowerContent.includes('fr fr') || lowerContent.includes('bussin') || lowerContent.includes(' ong') || lowerContent.startsWith('ong')) {
const randIndex = Math.floor(Math.random() * strings.capbacks.length);
message.reply(strings.capbacks[randIndex]);
}
const autoresponses = fn.autoresponses.checkForAll(lowerContent);
autoresponses.forEach(e => {
fn.autoresponses.send(message, e);
});
// Break the message down into its components and analyze it
const commandData = fn.dot.getCommandData(message);

View File

@@ -1,6 +1,6 @@
{
"name": "nodbot",
"version": "3.2.2",
"version": "3.2.3",
"description": "Nods and Nod Accessories, now with ChatGPT!",
"main": "main.js",
"dependencies": {
@@ -12,7 +12,7 @@
"dotenv": "^10.0.0",
"fuzzy-search": "^3.2.1",
"mysql": "^2.18.1",
"openai": "^3.2.1",
"openai": "^4.12.0",
"tenorjs": "^1.0.10"
},
"engines": {

View File

@@ -1,27 +0,0 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');
module.exports = {
data: new SlashCommandBuilder()
.setName('chat')
.setDescription('Send a message to ChatGPT')
.addStringOption(o =>
o.setName("prompt")
.setDescription("Prompt to send to ChatGPT")
.setRequired(true)
),
async execute(interaction) {
await interaction.deferReply();
await interaction.editReply(fn.embeds.generatingResponse());
const userPrompt = interaction.options.getString("prompt");
const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
const responseText = response.choices[0].text;
const usage = {
tokens: response.usage.total_tokens,
usdc: response.usage.total_tokens * ( 0.2 / 1000 ) // 0.2¢ per 1000 tokens or 0.0002¢ per token.
};
const gptEmbed = fn.embeds.gpt(userPrompt, responseText, usage);
await interaction.editReply(gptEmbed);
fn.upload.openai(interaction.user.id, userPrompt, "gpt-3.5-turbo", usage.tokens, usage.usdc);
},
};

View File

@@ -1,40 +0,0 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');
const strings = require("../strings.json");
module.exports = {
data: new SlashCommandBuilder()
.setName('dalle')
.setDescription('Generate an image with DALL-e')
.addStringOption(o =>
o.setName("prompt")
.setDescription("Prompt to send to DALL-e")
.setRequired(true)
)
.addStringOption(o =>
o.setName("size")
.setDescription("1024x1024, 512x512, 256x256")
.setRequired(false)
.addChoices(
{ name: "1024x1024 (2¢)", value: "1024x1024" },
{ name: "512x512 (1.8¢)", value: "512x512" },
{ name: "256x256 (1.6¢)", value: "256x256" }
)),
async execute(interaction) {
try {
await interaction.deferReply();
await interaction.editReply(fn.embeds.generatingResponse());
const userPrompt = interaction.options.getString("prompt");
const size = interaction.options.getString("size") ? interaction.options.getString("size") : "512x512";
const imageUrl = await fn.openAI.imagePrompt(userPrompt, size);
const dalleEmbed = fn.embeds.dalle(userPrompt, imageUrl, size);
await interaction.editReply(dalleEmbed);
fn.upload.openai(interaction.user.id, userPrompt, "dalle", 0, strings.costs.dalle[size]);
} catch (err) {
const errorId = fn.generateErrorId();
console.error(`${errorId}: ${err}`);
await interaction.editReply(`An error has occured. Error ID: ${errorId}\n${err}`);
}
},
};

View File

@@ -207,7 +207,7 @@ module.exports = {
name: interaction.options.getString('pasta-name').toLowerCase(),
content: interaction.options.getString('pasta-content'),
};
fn.upload.pasta(pastaData, interaction.client);
await fn.upload.pasta(pastaData, interaction.client);
interaction.editReply({content: `The copypasta has been saved as ${pastaData.name}.pasta`, ephemeral: true });
break;
// Strain

View File

@@ -14,14 +14,6 @@
"urls": {
"avatar": "https://cdn.discordapp.com/avatars/513184762073055252/12227aa23a06d5178853e59b72c7487b.webp?size=128"
},
"capbacks": [
"on god?!",
"fr fr?!",
"no cap?!",
"no cap fr",
"bussin fr, no cap",
"ongggg no :billed_cap: fr fr"
],
"costs": {
"gpt": {
"gpt-3.5-turbo": 0.2
@@ -32,5 +24,61 @@
"1024x1024": 2.0
}
},
"ai": {
"chatModel": "gpt-3.5-turbo",
"chatPromptCentsPer": 0.15,
"chatPromptUnits": 1000,
"chatResCentsPer": 0.2,
"chatResUnits": 1000
},
"autoresponses": {
"bigDoinks": {
"keywords": ["big", "doinks"],
"responses": [
"<:bigdoinks:1053706618853924905> Gang.",
"<:bigdoinks:1053706618853924905> Out here in Amish",
"<:bigdoinks:1053706618853924905> Out here in Amish, smoking Big Doinks in Amish... Gang."
]
},
"ligma": {
"keywords": ["ligma"],
"responses": [
"ligma balls lmao gottem",
"ligma balls ahaha",
"https://tenor.com/view/ligma-balls-gif-12236083",
"<:deadmonkey:1139186312444911707>"
]
},
"ong": {
"keywords": [
"frfr",
"fr fr",
"bussin",
"no cap",
" ong "
],
"responses": [
"on god?!",
"fr fr?!",
"no cap?!",
"no cap fr",
"bussin fr, no cap",
"ongggg no :billed_cap: fr fr"
]
},
"fuckYou": {
"keywords": [
"fuck",
"nodbot"
],
"responses": [
"no u",
"go fuck yourself",
"why does everyone hate me :sob:",
"<:kms:1253790048696926298>",
"Eat a bag of dicks"
]
}
},
"temp": {}
}