Swap to localLLM

Skylar Grant 2025-01-05 18:23:54 -05:00
parent 46d86d2650
commit baeac362de
6 changed files with 142 additions and 12 deletions

package-lock.json (generated)

@@ -9,6 +9,7 @@
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"axios": "^1.7.9",
"discord.js": "^14.9.0",
"dotenv": "^16.0.3",
"openai": "^4.24.1",
@@ -255,6 +256,16 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/axios": {
"version": "1.7.9",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
"integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
"dependencies": {
"follow-redirects": "^1.15.6",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/base-64": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",
@@ -480,6 +491,25 @@
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
},
"node_modules/follow-redirects": {
"version": "1.15.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
"integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"engines": {
"node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
}
},
"node_modules/for-each": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
@@ -1024,6 +1054,11 @@
"undici-types": "~5.26.4"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz",

package.json

@@ -17,9 +17,10 @@
   },
   "homepage": "https://git.vfsh.dev/voidf1sh/nodgpt#readme",
   "dependencies": {
+    "axios": "^1.7.9",
     "discord.js": "^14.9.0",
     "dotenv": "^16.0.3",
-    "string.prototype.replaceall": "^1.0.7",
-    "openai": "^4.24.1"
+    "openai": "^4.24.1",
+    "string.prototype.replaceall": "^1.0.7"
   }
 }

src/data/strings.json

@@ -36,7 +36,9 @@
     "chatResCentsPer": 0.2,
     "chatResUnits": 1000,
     "imgModel": "dall-e-3",
-    "imgNumber": 1
+    "imgNumber": 1,
+    "sysPrompt": "You are a Discord bot named Nodbot aka NodGPT. You are a shitposting content serving bot in a server full of degenerates. Respond accordingly. Snark and sass is encouraged.",
+    "localModel": "llama3.2"
   },
   "temp": {}
 }
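
The two keys added here are read back as strings.ai.sysPrompt and strings.ai.localModel by the code in this commit, so they sit under this file's "ai" object. A minimal lookup sketch, assuming that nesting and a repo-root working directory:

const strings = require('./src/data/strings.json');
console.log(strings.ai.localModel); // "llama3.2"
console.log(strings.ai.sysPrompt);  // the system prompt seeded into every conversation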

src/modules/VoidLLM.js (new file)

@@ -0,0 +1,67 @@
const axios = require('axios');
const strings = require('../data/strings.json');

// Base URL and endpoints for the self-hosted LLM API.
const llmHost = "https://llm.vfsh.me";
const generateEndpoint = "/api/generate";
const chatEndpoint = "/api/chat";

module.exports = {
    // One-shot completion: each send() is independent, no history is kept.
    Generate: class Generate {
        constructor(model) {
            this.model = model;
            this.sysPrompt = strings.ai.sysPrompt;
        }

        send(prompt) {
            return new Promise((resolve, reject) => {
                axios.post(`${llmHost}${generateEndpoint}`, {
                    model: this.model,
                    system: this.sysPrompt,
                    prompt: prompt,
                    raw: false,
                    stream: false
                }, {
                    headers: {
                        'Content-Type': 'application/json'
                    }
                }).then((res) => {
                    // Resolves with the full response body.
                    resolve(res.data);
                }).catch((err) => {
                    reject(err);
                });
            });
        }
    },

    // Multi-turn conversation: the running message history lives on the instance.
    Chat: class Chat {
        constructor(model) {
            this.model = model;
            this.messages = [
                {
                    role: "system",
                    content: strings.ai.sysPrompt
                }
            ];
        }

        send(prompt) {
            return new Promise((resolve, reject) => {
                // Append the user message to the history before sending
                this.messages.push({
                    role: "user",
                    content: prompt
                });
                axios.post(`${llmHost}${chatEndpoint}`, {
                    model: this.model,
                    messages: this.messages,
                    stream: false
                }, {
                    headers: {
                        'Content-Type': 'application/json',
                    }
                }).then((res) => {
                    // Keep the assistant's reply in the history, then resolve with it.
                    this.messages.push(res.data.message);
                    resolve(res.data.message);
                }).catch((err) => {
                    reject(err);
                });
            });
        }
    }
}
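
A minimal usage sketch for the two classes above (paths assume the repo root, and the response shapes assume the host serves an Ollama-style API, which the /api/generate and /api/chat request bodies suggest):

const { Chat, Generate } = require('./src/modules/VoidLLM.js');

const chat = new Chat('llama3.2');
chat.send('Hello!').then((message) => {
    // Chat.send resolves with the assistant message object: { role, content }
    console.log(message.content);
});

const gen = new Generate('llama3.2');
gen.send('Write a haiku about Discord bots.').then((data) => {
    // Generate.send resolves with the whole response body; in an Ollama-style
    // API the generated text is in data.response
    console.log(data.response);
});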

src/modules/functions.js

@@ -15,6 +15,9 @@ const { EmbedBuilder } = Discord;
const OpenAI = require("openai");
const openai = new OpenAI();
// LocalLLM
const {Chat, Generate} = require("./VoidLLM.js");
// Various imports from other files
const config = require('../data/config.json');
const strings = require('../data/strings.json');
@@ -92,6 +95,13 @@ const functions = {
            .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢. Generated using ${strings.ai.chatModel}` })
        return { embeds: [gptEmbed] };
    },
    llm(prompt, response) {
        const llmEmbed = new EmbedBuilder()
            .setAuthor({ name: "NodBot powered by voidGPT", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
            .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
            .setFooter({ text: `Generated using ${strings.ai.localModel}` });
        return { embeds: [llmEmbed] };
    },
    generatingResponse() {
        const embed = new EmbedBuilder()
            .setAuthor({ name: "NodBot powered by OpenAI", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
@@ -135,6 +145,18 @@ const functions = {
            });
        }
    },
    localLLM: {
        async chat(prompt) {
            try {
                const chat = new Chat(strings.ai.localModel);
                const response = await chat.send(prompt);
                return response;
            } catch (e) {
                console.error(e);
                return;
            }
        }
    },
    async sendHeartbeat(url) {
        if (isDev) console.log("Heartbeat Sent: " + url);
        https.get(url, async (response) => {
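
Note that localLLM.chat constructs a fresh Chat for every prompt, so the history that Chat accumulates in this.messages never survives past a single slash command. A sketch of one way to keep context, caching one Chat per channel (the cache and function names are hypothetical, not part of this commit):

const { Chat } = require('./VoidLLM.js');
const strings = require('../data/strings.json');

const chats = new Map(); // hypothetical cache, keyed by Discord channel ID
async function chatWithHistory(channelId, prompt) {
    if (!chats.has(channelId)) chats.set(channelId, new Chat(strings.ai.localModel));
    return chats.get(channelId).send(prompt);
}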


@@ -15,14 +15,17 @@ module.exports = {
         await interaction.deferReply();
         await interaction.editReply(fn.builders.embeds.generatingResponse());
         const userPrompt = interaction.options.getString("prompt");
-        const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
-        const responseText = response.choices[0].message.content;
-        const usage = {
-            tokens: response.usage.total_tokens,
-            usdc: (response.usage.prompt_tokens * (strings.ai.chatPromptCentsPer / strings.ai.chatPromptUnits)) +
-                (response.usage.completion_tokens * (strings.ai.chatResCentsPer / strings.ai.chatResUnits))
-        };
-        const gptEmbed = fn.builders.embeds.gpt(userPrompt, responseText, usage);
-        await interaction.editReply(gptEmbed);
+        const response = await fn.localLLM.chat(userPrompt).catch(e => console.error(e));
+        const llmEmbed = fn.builders.embeds.llm(userPrompt, response.content);
+        await interaction.editReply(llmEmbed);
+        // const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
+        // const responseText = response.choices[0].message.content;
+        // const usage = {
+        //     tokens: response.usage.total_tokens,
+        //     usdc: (response.usage.prompt_tokens * (strings.ai.chatPromptCentsPer / strings.ai.chatPromptUnits)) +
+        //         (response.usage.completion_tokens * (strings.ai.chatResCentsPer / strings.ai.chatResUnits))
+        // };
+        // const gptEmbed = fn.builders.embeds.gpt(userPrompt, responseText, usage);
+        // await interaction.editReply(gptEmbed);
     },
 };
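
One caveat in the new handler: localLLM.chat catches its own errors and resolves undefined on failure, so the .catch chained here never fires and response.content can throw a TypeError. A defensive variant of the swapped-in lines (the fallback message is illustrative, not from the commit):

const response = await fn.localLLM.chat(userPrompt);
if (!response) {
    await interaction.editReply('The local model did not return a response.');
    return;
}
await interaction.editReply(fn.builders.embeds.llm(userPrompt, response.content));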