Merge branch 'dev' into v3.2.2-dev
commit 1a024b216a

.gitignore (vendored): 1 change
@@ -114,3 +114,4 @@ dist

 # TernJS port file
 .tern-port
+.DS_Store
@@ -5,4 +5,5 @@ WORKDIR /usr/src/app
 COPY package.json ./
 RUN npm install
 COPY . .
-CMD ["/bin/sh", "-c", "node main.js 2> /logs/nodbot.error 1> /logs/nodbot.log"]
+# CMD ["/bin/sh", "-c", "node main.js 2> /logs/nodbot.error 1> /logs/nodbot.log"]
+CMD ["/bin/sh", "-c", "node main.js 2> /logs/$(date +%Y-%m-%d_%H-%M-%S)-error.txt 1> /logs/$(date +%Y-%m-%d_%H-%M-%S)-status.txt"]
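With this change each container run writes its own pair of log files: /bin/sh expands the two $(date ...) substitutions when the CMD starts, so a container launched at, say, 2024-01-05 12:00:00 would log to /logs/2024-01-05_12-00-00-error.txt and /logs/2024-01-05_12-00-00-status.txt instead of overwriting the fixed nodbot.error and nodbot.log names.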
@@ -1,4 +1,5 @@
 {
   "guildId": "868542949737246730",
-  "validCommands": []
+  "validCommands": [],
+  "roaches": []
 }
@@ -5,13 +5,33 @@ module.exports = {
   name: 'joint',
   description: 'Send a random weed-themed phrase.',
   usage: '.joint',
-  alias: ['bong', 'blunt', 'bowl', 'pipe'],
+  alias: ['bong', 'blunt', 'bowl', 'pipe', 'dab', 'vape', 'dabs', 'shatter', 'edible', 'edibles', 'doobie', 'spliff', 'gummy', 'gummies', 'hash', 'toke', 'big doinks'],
   execute(message, commandData) {
     let joints = [];
+    // Create a simple array of the joint texts
     for (const entry of message.client.joints.map(joint => joint.content)) {
       joints.push(entry);
     }
-    const randIndex = Math.floor(Math.random() * joints.length);
+    // Generate a random number between 0 and the length of the joints array
+    let randIndex = Math.floor(Math.random() * joints.length);
+    // Grab the joint text from the array
+    let joint = joints[randIndex];
+
+    // Check if the joint has already been smoked
+    while (message.client.roaches.has(joint)) {
+      // Regenerate a random number and recheck
+      randIndex = Math.floor(Math.random() * joints.length);
+      joint = joints[randIndex];
+    }
+    // Send the joint
     message.reply(`${joints[randIndex]} ${emoji.joint}`);
+    // Check how full the roach collection is
+    if (message.client.roaches.size / joints.length >= 0.85) {
+      // If the roach collection is 85% of the joints collection
+      // Empty it out
+      message.client.roaches.clear();
+    }
+    // Add the joint to the roach collection
+    message.client.roaches.set(joint, "baked");
   }
 }
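The added roach logic is a no-immediate-repeat random picker: a pick is re-rolled while it is still in the roaches collection, and once that collection covers 85% of the pool it is emptied so phrases can come back around. A minimal standalone sketch of the same pattern, using a plain Map in place of the Discord.js Collection (the names here are illustrative, not from the repo):

const roaches = new Map();

function pickJoint(joints) {
  // Re-roll while the candidate is still a roach (already smoked).
  let joint = joints[Math.floor(Math.random() * joints.length)];
  while (roaches.has(joint)) {
    joint = joints[Math.floor(Math.random() * joints.length)];
  }
  // Once 85% of the pool has been smoked, empty the roaches and start over.
  if (roaches.size / joints.length >= 0.85) roaches.clear();
  roaches.set(joint, 'baked');
  return joint;
}

console.log(pickJoint(['puff puff pass', 'blaze it', 'roll another one']));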
@@ -8,7 +8,9 @@ module.exports = {
     const client = message.client;
     let pastaData;
     if (!client.pastas.has(commandData.args)) {
-      commandData.content = 'Sorry I couldn\'t find that pasta.';
+      pastaData = {
+        content: "Sorry, I couldn't find that pasta."
+      };
     } else {
       pastaData = client.pastas.get(commandData.args);
     }
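Building a fallback pastaData object, rather than stuffing the error text into commandData, lets the rest of the command treat a miss and a hit identically: the pasta embed updated later in this merge reads pastaData.content either way.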
functions.js: 47 changes
@@ -22,20 +22,8 @@ const Discord = require('discord.js');
 const FuzzySearch = require('fuzzy-search');
 
 // OpenAI
-const { Configuration, OpenAIApi } = require("openai");
-const configuration = new Configuration({
-  apiKey: process.env.OPENAI_API_KEY,
-});
-const openai = new OpenAIApi(configuration);
-async function openAIStatus(o) {
-  const response = await o.listModels();
-  const models = response.data.data;
-  models.forEach(e => {
-    console.log(`Model ID: ${e.id}`);
-  });
-};
-openAIStatus(openai);
-
+const OpenAI = require("openai");
+const openai = new OpenAI();
 
 // Various imports from other files
 const config = require('./config.json');
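The v4 SDK that replaces the Configuration/OpenAIApi pair builds its client in one step and reads OPENAI_API_KEY from the environment by default, which is why the bare new OpenAI() works here; the listModels() startup logging is simply dropped. A minimal sketch of the equivalent explicit form, in case the key ever lives somewhere other than that variable:

const OpenAI = require("openai");

// Equivalent to the bare `new OpenAI()` in the diff; v4 falls back to
// process.env.OPENAI_API_KEY when no apiKey option is given.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });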
@@ -161,7 +149,7 @@ const functions = {
       if (isDev) console.log('Strains Collection Built');
     },
     medicalAdvice(rows, client) {
-      if (!client.medicalAdviceCol) client.medicalAdviceColl = new Discord.Collection();
+      if (!client.medicalAdviceColl) client.medicalAdviceColl = new Discord.Collection();
       client.medicalAdviceColl.clear();
       for (const row of rows) {
         const medicalAdvice = {
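The one-character fix matters: the old guard tested client.medicalAdviceCol while the assignment and the clear() call below both use medicalAdviceColl, so the check never saw the collection it had created and a fresh Collection was allocated on every refresh. With the corrected name the guard allocates once and clear() does the actual resetting.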
@@ -172,6 +160,11 @@ const functions = {
       }
       if (isDev) console.log('Medical Advice Collection Built');
     },
+    roaches(client) {
+      if (!client.roaches) client.roaches = new Discord.Collection();
+      client.roaches.clear();
+      if (isDev) console.log('Medical Advice Collection Built');
+    }
   },
   dot: {
     getCommandData(message) {
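The new roaches(client) builder follows the same shape as the other collection builders (the 'Medical Advice Collection Built' log line appears to be carried over from the builder above it). A Discord.js Collection is a Map subclass, so the has/set/clear/size calls the joint command relies on are all available; a tiny standalone usage sketch:

const { Collection } = require('discord.js');

const roaches = new Collection();
roaches.set('blaze it', 'baked');      // mark a phrase as smoked
console.log(roaches.has('blaze it'));  // true
console.log(roaches.size);             // 1
roaches.clear();                       // reset, as the 85% check does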
@@ -274,7 +267,7 @@ const functions = {
       return { embeds: [ new Discord.MessageEmbed()
         .setAuthor({name: `${commandData.args}.${commandData.command}`})
         .setDescription(pastaData.content)
-        .setThumbnail(pastaData.iconUrl)
+        .setThumbnail("https://assets.vfsh.dev/shednod.png")
         .setTimestamp()
         .setFooter({text: commandData.author})]};
     },
@@ -379,9 +372,9 @@ const functions = {
     },
     gpt(prompt, response, usage) {
       const gptEmbed = new Discord.MessageEmbed()
-        .setAuthor({ name: "NodBot powered by GPT-3", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
+        .setAuthor({ name: "NodBot powered by GPT-3.5", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
         .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
-        .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢` })
+        .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢. Generated using ${strings.ai.chatModel}` })
       return { embeds: [gptEmbed] };
     },
     generatingResponse() {
@@ -407,12 +400,13 @@ const functions = {
         functions.download.requests(client);
       });
     },
-    pasta(pastaData, client) {
+    async pasta(pastaData, client) {
       const query = `INSERT INTO pastas (name, content) VALUES (${db.escape(pastaData.name)},${db.escape(pastaData.content)}) ON DUPLICATE KEY UPDATE content=${db.escape(pastaData.content)}`;
-      db.query(query, (err, rows, fields) => {
+      await db.query(query, (err, rows, fields) => {
         if (err) throw err;
         functions.download.pastas(client);
       });
+      return;
     },
     joint(content, client) {
       const query = `INSERT INTO joints (content) VALUES (${db.escape(content)})`;
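One caveat on the new await here: the mysql ^2 driver pinned in package.json exposes a callback API, so db.query does not return a promise and the await resolves immediately rather than after the insert. A sketch of a wrapper that would make the await meaningful, purely illustrative and not part of the commit:

const util = require('util');
const mysql = require('mysql');

const db = mysql.createConnection({ /* host, user, password, database */ });
// util.promisify adapts the error-first callback API so `await` really waits.
const queryAsync = util.promisify(db.query).bind(db);

async function pasta(pastaData) {
  const query = `INSERT INTO pastas (name, content) VALUES (${db.escape(pastaData.name)},${db.escape(pastaData.content)}) ON DUPLICATE KEY UPDATE content=${db.escape(pastaData.content)}`;
  await queryAsync(query); // resolves once MySQL acknowledges the write
}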
@@ -570,16 +564,17 @@ const functions = {
   openAI: {
     chatPrompt(userPrompt) {
       return new Promise(async (resolve, reject) => {
-        const response = await openai.createCompletion({
-          model: 'text-davinci-003',
-          prompt: userPrompt,
-          temperature: 0.7,
-          max_tokens: 250
+        const response = await openai.chat.completions.create({
+          messages: [{
+            role: 'user',
+            content: userPrompt
+          }],
+          model: strings.ai.chatModel
         }).catch(e => {
           reject(e);
           return null;
         });
-        resolve(response.data);
+        resolve(response);
       });
     },
     imagePrompt(userPrompt, size, userId) {
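Callers of chatPrompt have to change shape along with it: the v3 createCompletion result was nested under response.data with the text at choices[0].text, whereas the v4 chat completion resolved here exposes choices[0].message.content and a usage object at the top level. A sketch of consuming the resolved value (the handler name is illustrative):

// Assumes chatPrompt resolves with the raw v4 chat completion, as in the diff.
async function handleChat(userPrompt) {
  const response = await functions.openAI.chatPrompt(userPrompt);
  const responseText = response.choices[0].message.content; // was choices[0].text under v3
  const usage = response.usage; // { prompt_tokens, completion_tokens, total_tokens }
  console.log(`${responseText} (${usage.total_tokens} tokens)`);
}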
main.js: 4 changes
@@ -34,6 +34,7 @@ client.once('ready', () => {
   fn.collections.slashCommands(client);
   fn.collections.dotCommands(client);
   fn.collections.setvalidCommands(client);
+  fn.collections.roaches(client);
   fn.download.gifs(client);
   fn.download.pastas(client);
   fn.download.joints(client);
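Calling fn.collections.roaches(client) in the ready handler, next to the other collection builders, ensures client.roaches exists before any .joint message can reach the command's roaches.has() check.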
@@ -222,8 +223,11 @@ client.on('messageCreate', message => {
 
   // Wildcard Responses, will respond if any message contains the trigger word(s), excluding self-messages
   const lowerContent = message.content.toLowerCase();
+  // big + doinks
   if (lowerContent.includes('big') && lowerContent.includes('doinks')) message.reply('gang.');
+  // ligma
   if (lowerContent.includes('ligma')) message.reply('ligma balls, goteem');
+  // frfr, fr fr, bussin, ong
   if (lowerContent.includes('frfr') || lowerContent.includes('fr fr') || lowerContent.includes('bussin') || lowerContent.includes(' ong') || lowerContent.startsWith('ong')) {
     const randIndex = Math.floor(Math.random() * strings.capbacks.length);
     message.reply(strings.capbacks[randIndex]);
@@ -12,7 +12,7 @@
     "dotenv": "^10.0.0",
     "fuzzy-search": "^3.2.1",
     "mysql": "^2.18.1",
-    "openai": "^3.2.1",
+    "openai": "^4.12.0",
     "tenorjs": "^1.0.10"
   },
   "engines": {
@@ -1,27 +0,0 @@
-const { SlashCommandBuilder } = require('@discordjs/builders');
-const fn = require('../functions.js');
-
-module.exports = {
-  data: new SlashCommandBuilder()
-    .setName('chat')
-    .setDescription('Send a message to ChatGPT')
-    .addStringOption(o =>
-      o.setName("prompt")
-        .setDescription("Prompt to send to ChatGPT")
-        .setRequired(true)
-    ),
-  async execute(interaction) {
-    await interaction.deferReply();
-    await interaction.editReply(fn.embeds.generatingResponse());
-    const userPrompt = interaction.options.getString("prompt");
-    const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
-    const responseText = response.choices[0].text;
-    const usage = {
-      tokens: response.usage.total_tokens,
-      usdc: response.usage.total_tokens * ( 0.2 / 1000 ) // 0.2¢ per 1000 tokens or 0.0002¢ per token.
-    };
-    const gptEmbed = fn.embeds.gpt(userPrompt, responseText, usage);
-    await interaction.editReply(gptEmbed);
-    fn.upload.openai(interaction.user.id, userPrompt, "gpt-3.5-turbo", usage.tokens, usage.usdc);
-  },
-};
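The removed command was still written against the v3 response shape (response.choices[0].text) and hard-coded its pricing inline at 0.2¢ per 1000 tokens, so a 500-token exchange came out to 500 × 0.2 / 1000 = 0.1¢; that rate now also appears as chatResCentsPer in the new strings.ai block below.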
@@ -1,40 +0,0 @@
-const { SlashCommandBuilder } = require('@discordjs/builders');
-const fn = require('../functions.js');
-const strings = require("../strings.json");
-
-module.exports = {
-  data: new SlashCommandBuilder()
-    .setName('dalle')
-    .setDescription('Generate an image with DALL-e')
-    .addStringOption(o =>
-      o.setName("prompt")
-        .setDescription("Prompt to send to DALL-e")
-        .setRequired(true)
-    )
-    .addStringOption(o =>
-      o.setName("size")
-        .setDescription("1024x1024, 512x512, 256x256")
-        .setRequired(false)
-        .addChoices(
-          { name: "1024x1024 (2¢)", value: "1024x1024" },
-          { name: "512x512 (1.8¢)", value: "512x512" },
-          { name: "256x256 (1.6¢)", value: "256x256" }
-        )),
-  async execute(interaction) {
-    try {
-      await interaction.deferReply();
-      await interaction.editReply(fn.embeds.generatingResponse());
-      const userPrompt = interaction.options.getString("prompt");
-      const size = interaction.options.getString("size") ? interaction.options.getString("size") : "512x512";
-
-      const imageUrl = await fn.openAI.imagePrompt(userPrompt, size);
-      const dalleEmbed = fn.embeds.dalle(userPrompt, imageUrl, size);
-      await interaction.editReply(dalleEmbed);
-      fn.upload.openai(interaction.user.id, userPrompt, "dalle", 0, strings.costs.dalle[size]);
-    } catch (err) {
-      const errorId = fn.generateErrorId();
-      console.error(`${errorId}: ${err}`);
-      await interaction.editReply(`An error has occured. Error ID: ${errorId}\n${err}`);
-    }
-  },
-};
@@ -207,7 +207,7 @@ module.exports = {
         name: interaction.options.getString('pasta-name').toLowerCase(),
         content: interaction.options.getString('pasta-content'),
       };
-      fn.upload.pasta(pastaData, interaction.client);
+      await fn.upload.pasta(pastaData, interaction.client);
       interaction.editReply({content: `The copypasta has been saved as ${pastaData.name}.pasta`, ephemeral: true });
       break;
     // Strain
@@ -32,5 +32,12 @@
       "1024x1024": 2.0
     }
   },
+  "ai": {
+    "chatModel": "gpt-3.5-turbo",
+    "chatPromptCentsPer": 0.15,
+    "chatPromptUnits": 1000,
+    "chatResCentsPer": 0.2,
+    "chatResUnits": 1000
+  },
   "temp": {}
 }
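The new ai block reads as per-unit pricing for the GPT-3.5 model: 0.15¢ per 1000 prompt tokens and 0.2¢ per 1000 response tokens. A sketch of how a cents figure could be derived from these fields and a v4 usage object, assuming that is their intended use (the helper name is illustrative):

const strings = require('./strings.json');

// e.g. 420 prompt tokens + 180 completion tokens
// => 420/1000 * 0.15 + 180/1000 * 0.2 = 0.063 + 0.036 = 0.099¢
function chatCostCents(usage) {
  const promptCost = (usage.prompt_tokens / strings.ai.chatPromptUnits) * strings.ai.chatPromptCentsPer;
  const responseCost = (usage.completion_tokens / strings.ai.chatResUnits) * strings.ai.chatResCentsPer;
  return promptCost + responseCost;
}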