Compare commits
No commits in common. "38fdc07e4e4368fca36275084c2abd7424cc8e9f" and "1f85b97467a7970f4c9ffa7c01c46cdd36cf55c1" have entirely different histories.
38fdc07e4e...1f85b97467
2  .github/workflows/production-docker.yml  (vendored)
@@ -1,7 +1,7 @@
name: NodBot Production Dockerization

on:
  push:
  pull_request:
    branches:
      - main
3  .gitignore  (vendored)
@@ -2,7 +2,8 @@
.vscode
package-lock.json
.VSCodeCounter/
.env*
env.dev
env.prod

# Custom folders
# gifs/*
95  functions.js
@@ -21,22 +21,6 @@ const Discord = require('discord.js');
// Fuzzy text matching for db lookups
const FuzzySearch = require('fuzzy-search');

// OpenAI
const { Configuration, OpenAIApi } = require("openai");

const configuration = new Configuration({
  apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(configuration);
async function openAIStatus(o) {
  const response = await o.listModels();
  const models = response.data.data;
  models.forEach(e => {
    console.log(`Model ID: ${e.id}`);
  });
};
openAIStatus(openai);

// Various imports from other files
const config = require('./config.json');
const strings = require('./strings.json');
@@ -377,31 +361,6 @@ const functions = {

      interaction.reply({ embeds: [ strainEmbed ]});
    },
    dalle(prompt, imageUrl, size) {
      const dalleEmbed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by DALL-E", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .addFields(
          { name: "Prompt", value: prompt }
        )
        .setImage(imageUrl)
        .setFooter({ text: `This ${size} image cost ${strings.costs.dalle[size]}¢ to generate.` })
      return { embeds: [dalleEmbed] };
    },
    gpt(prompt, response, usage) {
      const gptEmbed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by GPT-3", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
        .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢` })
      return { embeds: [gptEmbed] };
    },
    generatingResponse() {
      const embed = new Discord.MessageEmbed()
        .setAuthor({ name: "NodBot powered by OpenAI", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
        .setImage("https://media.tenor.com/aHMHzNGCb4kAAAAC/sucks.gif")
        .setDescription("Generating a response, please stand by.")
        .setFooter({ text: "Ligma balls" });
      return { embeds: [embed] };
    }
  },
  collect: {
    gifName(interaction) {
@@ -496,12 +455,6 @@ const functions = {
          resolve();
        });
      })
    },
    openai(user, prompt, engine, tokens, usdc) {
      const query = `INSERT INTO openai (user, prompt, engine, tokens, usdc) VALUES (${db.escape(user)}, ${db.escape(prompt)}, ${db.escape(engine)}, ${db.escape(tokens)}, ${db.escape(usdc)})`;
      db.query(query, (err) => {
        if (err) throw err;
      });
    }
  },
  download: {
@@ -577,37 +530,6 @@ const functions = {
      }
    }
  },
  openAI: {
    chatPrompt(userPrompt) {
      return new Promise(async (resolve, reject) => {
        const response = await openai.createCompletion({
          model: 'text-davinci-003',
          prompt: userPrompt,
          temperature: 0.7,
          max_tokens: 250
        }).catch(e => {
          reject(e);
          return null;
        });
        resolve(response.data);
      });
    },
    imagePrompt(userPrompt, size, userId) {
      return new Promise(async (resolve, reject) => {
        try {
          const response = await openai.createImage({
            prompt: userPrompt,
            size: size,
            user: userId
          });
          resolve(response.data.data[0].url);
        } catch (e) {
          reject(e);
          return;
        }
      });
    }
  },
  // Parent-Level functions (miscellaneuous)
  closeRequest(requestId, interaction) {
    if (interaction.user.id == ownerId) {
@@ -627,7 +549,6 @@ const functions = {
  },
  spongebob(commandData) {
    let newText = '';
    let lastIsUpper = 0;
    for (const letter of commandData.args) {
      if (letter == ' ') {
        newText += letter;
@@ -635,15 +556,13 @@ const functions = {
      }
      if (letter == 'i' || letter == 'I') {
        newText += 'i';
        lastIsUpper = 0;
        continue;
      }
      if (letter == 'l' || letter == 'L') {
        newText += 'L';
        lastIsUpper = 1;
        continue;
      }
      if (lastIsUpper === 0) {
        if (Math.random() > 0.5) {
          newText += letter.toUpperCase();
        } else {
          newText += letter.toLowerCase();
@@ -652,18 +571,6 @@ const functions = {

    return newText + ' <:spongebob:1053398825965985822>';
  },
  generateErrorId() {
    const digitCount = 10;
    const digits = [];
    for (let i = 0; i < digitCount; i++) {
      const randBase = Math.random();
      const randNumRaw = randBase * 10;
      const randNumRound = Math.floor(randNumRaw);
      digits.push(randNumRound);
    }
    const errorId = digits.join("");
    return errorId;
  }
};

module.exports = functions;
package.json
@@ -1,7 +1,7 @@
{
  "name": "nodbot",
  "version": "3.2.0",
  "description": "Nods and Nod Accessories, now with ChatGPT!",
  "version": "3.1.0",
  "description": "Nods and Nod Accessories.",
  "main": "main.js",
  "dependencies": {
    "@discordjs/builders": "^0.16.0",
@@ -12,11 +12,10 @@
    "dotenv": "^10.0.0",
    "fuzzy-search": "^3.2.1",
    "mysql": "^2.18.1",
    "openai": "^3.2.1",
    "tenorjs": "^1.0.10"
  },
  "engines": {
    "node": "18.x"
    "node": "16.x"
  },
  "devDependencies": {
    "eslint": "^7.32.0"
@@ -1,27 +0,0 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');

module.exports = {
  data: new SlashCommandBuilder()
    .setName('chat')
    .setDescription('Send a message to ChatGPT')
    .addStringOption(o =>
      o.setName("prompt")
        .setDescription("Prompt to send to ChatGPT")
        .setRequired(true)
    ),
  async execute(interaction) {
    await interaction.deferReply();
    await interaction.editReply(fn.embeds.generatingResponse());
    const userPrompt = interaction.options.getString("prompt");
    const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
    const responseText = response.choices[0].text;
    const usage = {
      tokens: response.usage.total_tokens,
      usdc: response.usage.total_tokens * ( 0.2 / 1000 ) // 0.2¢ per 1000 tokens or 0.0002¢ per token.
    };
    const gptEmbed = fn.embeds.gpt(userPrompt, responseText, usage);
    await interaction.editReply(gptEmbed);
    fn.upload.openai(interaction.user.id, userPrompt, "gpt-3.5-turbo", usage.tokens, usage.usdc);
  },
};
@@ -1,40 +0,0 @@
const { SlashCommandBuilder } = require('@discordjs/builders');
const fn = require('../functions.js');
const strings = require("../strings.json");

module.exports = {
  data: new SlashCommandBuilder()
    .setName('dalle')
    .setDescription('Generate an image with DALL-e')
    .addStringOption(o =>
      o.setName("prompt")
        .setDescription("Prompt to send to DALL-e")
        .setRequired(true)
    )
    .addStringOption(o =>
      o.setName("size")
        .setDescription("1024x1024, 512x512, 256x256")
        .setRequired(false)
        .addChoices(
          { name: "1024x1024 (2¢)", value: "1024x1024" },
          { name: "512x512 (1.8¢)", value: "512x512" },
          { name: "256x256 (1.6¢)", value: "256x256" }
        )),
  async execute(interaction) {
    try {
      await interaction.deferReply();
      await interaction.editReply(fn.embeds.generatingResponse());
      const userPrompt = interaction.options.getString("prompt");
      const size = interaction.options.getString("size") ? interaction.options.getString("size") : "512x512";

      const imageUrl = await fn.openAI.imagePrompt(userPrompt, size);
      const dalleEmbed = fn.embeds.dalle(userPrompt, imageUrl, size);
      await interaction.editReply(dalleEmbed);
      fn.upload.openai(interaction.user.id, userPrompt, "dalle", 0, strings.costs.dalle[size]);
    } catch (err) {
      const errorId = fn.generateErrorId();
      console.error(`${errorId}: ${err}`);
      await interaction.editReply(`An error has occured. Error ID: ${errorId}\n${err}`);
    }
  },
};
10  strings.json
@@ -22,15 +22,5 @@
    "bussin fr, no cap",
    "ongggg no :billed_cap: fr fr"
  ],
  "costs": {
    "gpt": {
      "gpt-3.5-turbo": 0.2
    },
    "dalle": {
      "256x256": 1.6,
      "512x512": 1.8,
      "1024x1024": 2.0
    }
  },
  "temp": {}
}