Merge branch 'nodGPT'
commit 98acb652af

functions.js | 90
@@ -21,6 +21,22 @@ const Discord = require('discord.js');
 // Fuzzy text matching for db lookups
 const FuzzySearch = require('fuzzy-search');
 
+// OpenAI
+const { Configuration, OpenAIApi } = require("openai");
+
+const configuration = new Configuration({
+  apiKey: process.env.OPENAI_API_KEY,
+});
+const openai = new OpenAIApi(configuration);
+async function openAIStatus(o) {
+  const response = await o.listModels();
+  const models = response.data.data;
+  models.forEach(e => {
+    console.log(`Model ID: ${e.id}`);
+  });
+};
+openAIStatus(openai);
+
 // Various imports from other files
 const config = require('./config.json');
 const strings = require('./strings.json');
@@ -361,6 +377,31 @@ const functions = {
 
       interaction.reply({ embeds: [ strainEmbed ]});
     },
+    dalle(prompt, imageUrl, size) {
+      const dalleEmbed = new Discord.MessageEmbed()
+        .setAuthor({ name: "NodBot powered by DALL-E", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
+        .addFields(
+          { name: "Prompt", value: prompt }
+        )
+        .setImage(imageUrl)
+        .setFooter({ text: `This ${size} image cost ${strings.costs.dalle[size]}¢ to generate.` })
+      return { embeds: [dalleEmbed] };
+    },
+    gpt(prompt, response, usage) {
+      const gptEmbed = new Discord.MessageEmbed()
+        .setAuthor({ name: "NodBot powered by GPT-3", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
+        .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
+        .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢` })
+      return { embeds: [gptEmbed] };
+    },
+    generatingResponse() {
+      const embed = new Discord.MessageEmbed()
+        .setAuthor({ name: "NodBot powered by OpenAI", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
+        .setImage("https://media.tenor.com/aHMHzNGCb4kAAAAC/sucks.gif")
+        .setDescription("Generating a response, please stand by.")
+        .setFooter({ text: "Ligma balls" });
+      return { embeds: [embed] };
+    }
   },
   collect: {
     gifName(interaction) {
@@ -455,6 +496,12 @@ const functions = {
         resolve();
       });
     })
+    },
+    openai(user, prompt, engine, tokens, usdc) {
+      const query = `INSERT INTO openai (user, prompt, engine, tokens, usdc) VALUES (${db.escape(user)}, ${db.escape(prompt)}, ${db.escape(engine)}, ${db.escape(tokens)}, ${db.escape(usdc)})`;
+      db.query(query, (err) => {
+        if (err) throw err;
+      });
     }
   },
   download: {
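Side note on the hunk above: the new upload.openai() helper inserts usage rows into an `openai` table that is not defined anywhere in this diff. As a point of reference only, a table matching the INSERT's columns might look like the sketch below; the column types and the extra id/timestamp fields are assumptions, not part of the commit.

// Hypothetical schema for the `openai` usage table assumed by upload.openai()
// (column names come from the INSERT above; types and extra columns are guesses):
const createOpenaiTable = `
  CREATE TABLE IF NOT EXISTS openai (
    id INT AUTO_INCREMENT PRIMARY KEY,
    user VARCHAR(32),
    prompt TEXT,
    engine VARCHAR(64),
    tokens INT,
    usdc DECIMAL(10,4),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  )`;
db.query(createOpenaiTable, (err) => {
  if (err) throw err;
});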
@@ -530,6 +577,37 @@ const functions = {
       }
     }
   },
+  openAI: {
+    chatPrompt(userPrompt) {
+      return new Promise(async (resolve, reject) => {
+        const response = await openai.createCompletion({
+          model: 'text-davinci-003',
+          prompt: userPrompt,
+          temperature: 0.7,
+          max_tokens: 250
+        }).catch(e => {
+          reject(e);
+          return null;
+        });
+        resolve(response.data);
+      });
+    },
+    imagePrompt(userPrompt, size, userId) {
+      return new Promise(async (resolve, reject) => {
+        try {
+          const response = await openai.createImage({
+            prompt: userPrompt,
+            size: size,
+            user: userId
+          });
+          resolve(response.data.data[0].url);
+        } catch (e) {
+          reject(e);
+          return;
+        }
+      });
+    }
+  },
   // Parent-Level functions (miscellaneuous)
   closeRequest(requestId, interaction) {
     if (interaction.user.id == ownerId) {
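Note for readers of the hunk above: chatPrompt() resolves with `response.data` from the 3.x openai SDK, i.e. the completion payload itself rather than the raw HTTP response. An illustrative caller (not part of the commit; see slash-commands/chat.js below for the real usage):

// Illustrative only — shows the shape the resolved value is expected to have.
const fn = require('./functions.js');

fn.openAI.chatPrompt("Say hello to the Nod crew.")
  .then(data => {
    // data is the completion payload: { id, model, choices: [...], usage: {...} }
    console.log(data.choices[0].text);     // completion text
    console.log(data.usage.total_tokens);  // token count, used for the cost estimate
  })
  .catch(console.error);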
@@ -571,6 +649,18 @@ const functions = {
 
     return newText + ' <:spongebob:1053398825965985822>';
   },
+  generateErrorId() {
+    const digitCount = 10;
+    const digits = [];
+    for (let i = 0; i < digitCount; i++) {
+      const randBase = Math.random();
+      const randNumRaw = randBase * 10;
+      const randNumRound = Math.floor(randNumRaw);
+      digits.push(randNumRound);
+    }
+    const errorId = digits.join("");
+    return errorId;
+  }
 };
 
 module.exports = functions;
package.json

@@ -1,7 +1,7 @@
 {
   "name": "nodbot",
-  "version": "3.1.0",
-  "description": "Nods and Nod Accessories.",
+  "version": "3.2.0",
+  "description": "Nods and Nod Accessories, now with ChatGPT!",
   "main": "main.js",
   "dependencies": {
     "@discordjs/builders": "^0.16.0",
@@ -12,10 +12,11 @@
     "dotenv": "^10.0.0",
     "fuzzy-search": "^3.2.1",
     "mysql": "^2.18.1",
+    "openai": "^3.2.1",
     "tenorjs": "^1.0.10"
   },
   "engines": {
-    "node": "16.x"
+    "node": "18.x"
   },
   "devDependencies": {
     "eslint": "^7.32.0"
slash-commands/chat.js (new file) | 27

@@ -0,0 +1,27 @@
+const { SlashCommandBuilder } = require('@discordjs/builders');
+const fn = require('../functions.js');
+
+module.exports = {
+  data: new SlashCommandBuilder()
+    .setName('chat')
+    .setDescription('Send a message to ChatGPT')
+    .addStringOption(o =>
+      o.setName("prompt")
+        .setDescription("Prompt to send to ChatGPT")
+        .setRequired(true)
+    ),
+  async execute(interaction) {
+    await interaction.deferReply();
+    await interaction.editReply(fn.embeds.generatingResponse());
+    const userPrompt = interaction.options.getString("prompt");
+    const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
+    const responseText = response.choices[0].text;
+    const usage = {
+      tokens: response.usage.total_tokens,
+      usdc: response.usage.total_tokens * ( 0.2 / 1000 ) // 0.2¢ per 1000 tokens or 0.0002¢ per token.
+    };
+    const gptEmbed = fn.embeds.gpt(userPrompt, responseText, usage);
+    await interaction.editReply(gptEmbed);
+    fn.upload.openai(interaction.user.id, userPrompt, "gpt-3.5-turbo", usage.tokens, usage.usdc);
+  },
+};
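For reference, the cost comment in /chat works out as follows (a worked example with an assumed token count, not part of the commit):

// Worked example of the /chat cost arithmetic (token count is hypothetical):
const totalTokens = 250;                  // e.g. a completion that hits max_tokens: 250
const usdc = totalTokens * (0.2 / 1000);  // 0.2¢ per 1000 tokens
console.log(`${usdc}¢`);                  // "0.05¢", i.e. $0.0005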
slash-commands/dalle.js (new file) | 40

@@ -0,0 +1,40 @@
+const { SlashCommandBuilder } = require('@discordjs/builders');
+const fn = require('../functions.js');
+const strings = require("../strings.json");
+
+module.exports = {
+  data: new SlashCommandBuilder()
+    .setName('dalle')
+    .setDescription('Generate an image with DALL-e')
+    .addStringOption(o =>
+      o.setName("prompt")
+        .setDescription("Prompt to send to DALL-e")
+        .setRequired(true)
+    )
+    .addStringOption(o =>
+      o.setName("size")
+        .setDescription("1024x1024, 512x512, 256x256")
+        .setRequired(false)
+        .addChoices(
+          { name: "1024x1024 (2¢)", value: "1024x1024" },
+          { name: "512x512 (1.8¢)", value: "512x512" },
+          { name: "256x256 (1.6¢)", value: "256x256" }
+        )),
+  async execute(interaction) {
+    try {
+      await interaction.deferReply();
+      await interaction.editReply(fn.embeds.generatingResponse());
+      const userPrompt = interaction.options.getString("prompt");
+      const size = interaction.options.getString("size") ? interaction.options.getString("size") : "512x512";
+
+      const imageUrl = await fn.openAI.imagePrompt(userPrompt, size);
+      const dalleEmbed = fn.embeds.dalle(userPrompt, imageUrl, size);
+      await interaction.editReply(dalleEmbed);
+      fn.upload.openai(interaction.user.id, userPrompt, "dalle", 0, strings.costs.dalle[size]);
+    } catch (err) {
+      const errorId = fn.generateErrorId();
+      console.error(`${errorId}: ${err}`);
+      await interaction.editReply(`An error has occured. Error ID: ${errorId}\n${err}`);
+    }
+  },
+};
strings.json | 10

@@ -22,5 +22,15 @@
     "bussin fr, no cap",
     "ongggg no :billed_cap: fr fr"
   ],
+  "costs": {
+    "gpt": {
+      "gpt-3.5-turbo": 0.2
+    },
+    "dalle": {
+      "256x256": 1.6,
+      "512x512": 1.8,
+      "1024x1024": 2.0
+    }
+  },
   "temp": {}
 }