Compare commits: 1f85b97467...38fdc07e4e

12 Commits (SHA1):
38fdc07e4e
e99e6b2e1f
98acb652af
c8d059e36d
7c09aaffaa
08618e2be2
200468ac10
efdc605dc0
2c75ff09d9
4e2e8bc702
53625be91f
755c8510d7

.github/workflows/production-docker.yml (vendored), 2 changes

@@ -1,7 +1,7 @@
name: NodBot Production Dockerization

on:
  pull_request:
  push:
    branches:
      - main

.gitignore (vendored), 3 changes

@@ -2,8 +2,7 @@
.vscode
package-lock.json
.VSCodeCounter/
env.dev
env.prod
.env*

# Custom folders
# gifs/*

functions.js, 95 changes

@@ -21,6 +21,22 @@ const Discord = require('discord.js');
// Fuzzy text matching for db lookups
const FuzzySearch = require('fuzzy-search');

// OpenAI
const { Configuration, OpenAIApi } = require("openai");

const configuration = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(configuration);
async function openAIStatus(o) {
  const response = await o.listModels();
  const models = response.data.data;
  models.forEach(e => {
    console.log(`Model ID: ${e.id}`);
  });
};
openAIStatus(openai);

// Various imports from other files
const config = require('./config.json');
const strings = require('./strings.json');
@@ -361,6 +377,31 @@ const functions = {

      interaction.reply({ embeds: [ strainEmbed ]});
    },
    dalle(prompt, imageUrl, size) {
      const dalleEmbed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by DALL-E", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .addFields(
          { name: "Prompt", value: prompt }
        )
        .setImage(imageUrl)
        .setFooter({ text: `This ${size} image cost ${strings.costs.dalle[size]}¢ to generate.` })
      return { embeds: [dalleEmbed] };
    },
    gpt(prompt, response, usage) {
      const gptEmbed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by GPT-3", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
        .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢` })
      return { embeds: [gptEmbed] };
    },
    generatingResponse() {
      const embed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by OpenAI", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .setImage("https://media.tenor.com/aHMHzNGCb4kAAAAC/sucks.gif")
        .setDescription("Generating a response, please stand by.")
        .setFooter({ text: "Ligma balls" });
      return { embeds: [embed] };
    }
  },
  collect: {
    gifName(interaction) {
@@ -455,6 +496,12 @@ const functions = {
          resolve();
        });
      })
    },
    openai(user, prompt, engine, tokens, usdc) {
      const query = `INSERT INTO openai (user, prompt, engine, tokens, usdc) VALUES (${db.escape(user)}, ${db.escape(prompt)}, ${db.escape(engine)}, ${db.escape(tokens)}, ${db.escape(usdc)})`;
      db.query(query, (err) => {
        if (err) throw err;
      });
    }
  },
  download: {
@@ -530,6 +577,37 @@ const functions = {
      }
    }
  },
  openAI: {
    chatPrompt(userPrompt) {
      return new Promise(async (resolve, reject) => {
        const response = await openai.createCompletion({
          model: 'text-davinci-003',
          prompt: userPrompt,
          temperature: 0.7,
          max_tokens: 250
        }).catch(e => {
          reject(e);
          return null;
        });
        resolve(response.data);
      });
    },
    imagePrompt(userPrompt, size, userId) {
      return new Promise(async (resolve, reject) => {
        try {
          const response = await openai.createImage({
            prompt: userPrompt,
            size: size,
            user: userId
          });
          resolve(response.data.data[0].url);
        } catch (e) {
          reject(e);
          return;
        }
      });
    }
  },
  // Parent-Level functions (miscellaneuous)
  closeRequest(requestId, interaction) {
    if (interaction.user.id == ownerId) {
@@ -549,6 +627,7 @@ const functions = {
  },
  spongebob(commandData) {
    let newText = '';
    let lastIsUpper = 0;
    for (const letter of commandData.args) {
      if (letter == ' ') {
        newText += letter;
@@ -556,13 +635,15 @@ const functions = {
      }
      if (letter == 'i' || letter == 'I') {
        newText += 'i';
        lastIsUpper = 0;
        continue;
      }
      if (letter == 'l' || letter == 'L') {
        newText += 'L';
        lastIsUpper = 1;
        continue;
      }
      if (Math.random() > 0.5) {
        if (lastIsUpper === 0) {
          newText += letter.toUpperCase();
        } else {
          newText += letter.toLowerCase();
@@ -571,6 +652,18 @@ const functions = {

    return newText + ' <:spongebob:1053398825965985822>';
  },
  generateErrorId() {
    const digitCount = 10;
    const digits = [];
    for (let i = 0; i < digitCount; i++) {
      const randBase = Math.random();
      const randNumRaw = randBase * 10;
      const randNumRound = Math.floor(randNumRaw);
      digits.push(randNumRound);
    }
    const errorId = digits.join("");
    return errorId;
  }
};

module.exports = functions;
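
For orientation, here is a minimal sketch of how the new openAI helper and embed builder added above fit together outside the diff context. It mirrors what the slash commands later in this compare do; the askGpt name is a hypothetical wrapper, and the 0.2¢-per-1000-tokens rate is taken from strings.json rather than from this file.

// Sketch only: consuming functions.js the way slash-commands/chat.js (below) does.
const fn = require('./functions.js');

async function askGpt(prompt) {
  // chatPrompt resolves with the completion payload (response.data from openai.createCompletion).
  const completion = await fn.openAI.chatPrompt(prompt);
  const text = completion.choices[0].text;
  const usage = {
    tokens: completion.usage.total_tokens,
    usdc: completion.usage.total_tokens * (0.2 / 1000), // 0.2¢ per 1K tokens, per strings.json
  };
  // Returns an { embeds: [...] } object ready to pass to interaction.reply() / editReply().
  return fn.embeds.gpt(prompt, text, usage);
}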

package.json

@@ -1,7 +1,7 @@
{
  "name": "nodbot",
  "version": "3.1.0",
  "description": "Nods and Nod Accessories.",
  "version": "3.2.0",
  "description": "Nods and Nod Accessories, now with ChatGPT!",
  "main": "main.js",
  "dependencies": {
    "@discordjs/builders": "^0.16.0",
@@ -12,10 +12,11 @@
    "dotenv": "^10.0.0",
    "fuzzy-search": "^3.2.1",
    "mysql": "^2.18.1",
    "openai": "^3.2.1",
    "tenorjs": "^1.0.10"
  },
  "engines": {
    "node": "16.x"
    "node": "18.x"
  },
  "devDependencies": {
    "eslint": "^7.32.0"

slash-commands/chat.js (new file), 27 lines

@@ -0,0 +1,27 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');

module.exports = {
  data: new SlashCommandBuilder()
    .setName('chat')
    .setDescription('Send a message to ChatGPT')
    .addStringOption(o =>
      o.setName("prompt")
        .setDescription("Prompt to send to ChatGPT")
        .setRequired(true)
    ),
  async execute(interaction) {
    await interaction.deferReply();
    await interaction.editReply(fn.embeds.generatingResponse());
    const userPrompt = interaction.options.getString("prompt");
    const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
    const responseText = response.choices[0].text;
    const usage = {
      tokens: response.usage.total_tokens,
      usdc: response.usage.total_tokens * ( 0.2 / 1000 ) // 0.2¢ per 1000 tokens or 0.0002¢ per token.
    };
    const gptEmbed = fn.embeds.gpt(userPrompt, responseText, usage);
    await interaction.editReply(gptEmbed);
    fn.upload.openai(interaction.user.id, userPrompt, "gpt-3.5-turbo", usage.tokens, usage.usdc);
  },
};
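
As a quick sanity check on the usdc line above, the arithmetic works out like this (a sketch; the 1234-token count is an illustrative value, not taken from the diff):

// Cost check for the 0.2¢-per-1000-tokens rate used by /chat.
const totalTokens = 1234;                // example value, not from the commit
const usdc = totalTokens * (0.2 / 1000); // 0.0002¢ per token
console.log(usdc);                       // 0.2468 (¢), i.e. roughly a quarter of a cent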

slash-commands/dalle.js (new file), 40 lines

@@ -0,0 +1,40 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');
const strings = require("../strings.json");

module.exports = {
  data: new SlashCommandBuilder()
    .setName('dalle')
    .setDescription('Generate an image with DALL-e')
    .addStringOption(o =>
      o.setName("prompt")
        .setDescription("Prompt to send to DALL-e")
        .setRequired(true)
    )
    .addStringOption(o =>
      o.setName("size")
        .setDescription("1024x1024, 512x512, 256x256")
        .setRequired(false)
        .addChoices(
          { name: "1024x1024 (2¢)", value: "1024x1024" },
          { name: "512x512 (1.8¢)", value: "512x512" },
          { name: "256x256 (1.6¢)", value: "256x256" }
        )),
  async execute(interaction) {
    try {
      await interaction.deferReply();
      await interaction.editReply(fn.embeds.generatingResponse());
      const userPrompt = interaction.options.getString("prompt");
      const size = interaction.options.getString("size") ? interaction.options.getString("size") : "512x512";

      const imageUrl = await fn.openAI.imagePrompt(userPrompt, size);
      const dalleEmbed = fn.embeds.dalle(userPrompt, imageUrl, size);
      await interaction.editReply(dalleEmbed);
      fn.upload.openai(interaction.user.id, userPrompt, "dalle", 0, strings.costs.dalle[size]);
    } catch (err) {
      const errorId = fn.generateErrorId();
      console.error(`${errorId}: ${err}`);
      await interaction.editReply(`An error has occured. Error ID: ${errorId}\n${err}`);
    }
  },
};

strings.json, 10 changes

@@ -22,5 +22,15 @@
    "bussin fr, no cap",
    "ongggg no :billed_cap: fr fr"
  ],
  "costs": {
    "gpt": {
      "gpt-3.5-turbo": 0.2
    },
    "dalle": {
      "256x256": 1.6,
      "512x512": 1.8,
      "1024x1024": 2.0
    }
  },
  "temp": {}
}
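
For reference, a small sketch of how this new costs table is read at runtime (keys and values taken from the JSON above; the 512x512 size shown is the default that /dalle falls back to):

// Sketch: reading the costs table the way dalle.js and functions.js do.
const strings = require('./strings.json');

const size = '512x512';                              // default size used by /dalle
const dalleCost = strings.costs.dalle[size];         // 1.8 (¢) for a 512x512 image
const gptRate = strings.costs.gpt['gpt-3.5-turbo'];  // 0.2 (¢) per 1000 tokens

console.log(`${size} image: ${dalleCost}¢, GPT-3.5: ${gptRate}¢ per 1K tokens`);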