Swap to localLLM

parent 46d86d2650
commit baeac362de
package-lock.json (generated), 35 lines changed
@@ -9,6 +9,7 @@
       "version": "1.0.0",
       "license": "MIT",
       "dependencies": {
+        "axios": "^1.7.9",
         "discord.js": "^14.9.0",
         "dotenv": "^16.0.3",
         "openai": "^4.24.1",

@@ -255,6 +256,16 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/axios": {
+      "version": "1.7.9",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz",
+      "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==",
+      "dependencies": {
+        "follow-redirects": "^1.15.6",
+        "form-data": "^4.0.0",
+        "proxy-from-env": "^1.1.0"
+      }
+    },
     "node_modules/base-64": {
       "version": "0.1.0",
       "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",

@@ -480,6 +491,25 @@
       "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
       "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
     },
+    "node_modules/follow-redirects": {
+      "version": "1.15.9",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz",
+      "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==",
+      "funding": [
+        {
+          "type": "individual",
+          "url": "https://github.com/sponsors/RubenVerborgh"
+        }
+      ],
+      "engines": {
+        "node": ">=4.0"
+      },
+      "peerDependenciesMeta": {
+        "debug": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/for-each": {
       "version": "0.3.3",
       "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",

@@ -1024,6 +1054,11 @@
         "undici-types": "~5.26.4"
       }
     },
+    "node_modules/proxy-from-env": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
+    },
     "node_modules/regexp.prototype.flags": {
       "version": "1.5.1",
       "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz",

@@ -17,9 +17,10 @@
   },
   "homepage": "https://git.vfsh.dev/voidf1sh/nodgpt#readme",
   "dependencies": {
+    "axios": "^1.7.9",
     "discord.js": "^14.9.0",
     "dotenv": "^16.0.3",
-    "string.prototype.replaceall": "^1.0.7",
-    "openai": "^4.24.1"
+    "openai": "^4.24.1",
+    "string.prototype.replaceall": "^1.0.7"
   }
 }

@@ -36,7 +36,9 @@
     "chatResCentsPer": 0.2,
     "chatResUnits": 1000,
     "imgModel": "dall-e-3",
-    "imgNumber": 1
+    "imgNumber": 1,
+    "sysPrompt": "You are a Discord bot named Nodbot aka NodGPT. You are a shitposting content serving bot in a server full of degenerates. Respond accordingly. Snark and sass is encouraged.",
+    "localModel": "llama3.2"
   },
   "temp": {}
 }
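
For reference, a sketch of how the touched block reads after this hunk. The file name is not preserved above; given the require('../data/strings.json') calls and the strings.ai.* lookups elsewhere in this commit, it is presumably src/data/strings.json, with keys outside the hunk elided:

    {
      "ai": {
        ...,
        "chatResCentsPer": 0.2,
        "chatResUnits": 1000,
        "imgModel": "dall-e-3",
        "imgNumber": 1,
        "sysPrompt": "You are a Discord bot named Nodbot aka NodGPT. You are a shitposting content serving bot in a server full of degenerates. Respond accordingly. Snark and sass is encouraged.",
        "localModel": "llama3.2"
      },
      "temp": {}
    }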

src/modules/VoidLLM.js (new file, 67 lines)
@@ -0,0 +1,67 @@
+const axios = require('axios');
+const strings = require('../data/strings.json');
+const llmHost = "https://llm.vfsh.me";
+const generateEndpoint = "/api/generate";
+const chatEndpoint = "/api/chat";
+
+module.exports = {
+    Generate: class Generate {
+        constructor(model) {
+            this.model = model;
+            this.sysPrompt = strings.ai.sysPrompt;
+        }
+        send(prompt) {
+            return new Promise((resolve, reject) => {
+                axios.post(`${llmHost}${generateEndpoint}`, {
+                    model: this.model,
+                    system: this.sysPrompt,
+                    prompt: prompt,
+                    raw: false,
+                    stream: false
+                }, {
+                    headers: {
+                        'Content-Type': 'application/json'
+                    }
+                }).then((res) => {
+                    resolve(res.data);
+                }).catch((err) => {
+                    reject(err);
+                });
+            });
+        }
+    },
+    Chat: class Chat {
+        constructor(model) {
+            this.model = model;
+            this.messages = [
+                {
+                    role: "system",
+                    content: strings.ai.sysPrompt
+                }
+            ];
+        }
+        send(prompt) {
+            return new Promise((resolve, reject) => {
+                // Append the message to the messages array
+                this.messages.push({
+                    role: "user",
+                    content: prompt
+                });
+                axios.post(`${llmHost}${chatEndpoint}`, {
+                    model: this.model,
+                    messages: this.messages,
+                    stream: false
+                }, {
+                    headers: {
+                        'Content-Type': 'application/json',
+                    }
+                }).then((res) => {
+                    this.messages.push(res.data.message);
+                    resolve(res.data.message);
+                }).catch((err) => {
+                    reject(err);
+                });
+            });
+        }
+    }
+}
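
A minimal usage sketch of the new module (not part of the commit). The Chat flow mirrors how the functions module consumes it further down; Generate is not exercised anywhere else in this commit, so the call below is illustrative and assumes the server at llm.vfsh.me answers with an Ollama-style body whose generated text is in a "response" field:

    // Hypothetical caller, for illustration only.
    // Assumes src/data/strings.json defines ai.localModel (e.g. "llama3.2")
    // and that /api/chat and /api/generate behave like Ollama's endpoints.
    const { Chat, Generate } = require('./src/modules/VoidLLM.js');
    const strings = require('./src/data/strings.json');

    async function demo() {
        // Multi-turn chat: the system prompt is seeded in the constructor and
        // send() appends both the user message and the model reply, so the
        // conversation state lives on the Chat instance.
        const chat = new Chat(strings.ai.localModel);
        const reply = await chat.send("Introduce yourself in one sentence.");
        console.log(reply.content); // send() resolves with the message object { role, content }

        // One-shot generation: send() resolves with the raw response body;
        // for an Ollama-style server the text is in data.response (assumption).
        const gen = new Generate(strings.ai.localModel);
        const data = await gen.send("Write a haiku about Discord bots.");
        console.log(data.response);
    }

    demo().catch(console.error);

Because each Chat instance keeps its own messages array, creating a new Chat per request (as the localLLM.chat helper added below does) starts a fresh conversation every time; reusing one instance would carry history across prompts.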

@@ -15,6 +15,9 @@ const { EmbedBuilder } = Discord;
 const OpenAI = require("openai");
 const openai = new OpenAI();
 
+// LocalLLM
+const {Chat, Generate} = require("./VoidLLM.js");
+
 // Various imports from other files
 const config = require('../data/config.json');
 const strings = require('../data/strings.json');

@@ -92,6 +95,13 @@ const functions = {
                 .setFooter({ text: `This prompt used ${usage.tokens} tokens for a cost of ${usage.usdc}¢. Generated using ${strings.ai.chatModel}` })
             return { embeds: [gptEmbed] };
         },
+        llm(prompt, response) {
+            const llmEmbed = new EmbedBuilder()
+                .setAuthor({ name: "NodBot powered by voidGPT", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })
+                .setDescription(`**Prompt**\n${prompt}\n\n**Response**\n${response}`)
+                .setFooter({ text: `Generated using ${strings.ai.localModel}` });
+            return { embeds: [llmEmbed] };
+        },
         generatingResponse() {
             const embed = new EmbedBuilder()
                 .setAuthor({ name: "NodBot powered by OpenAI", iconURL: "https://assets.vfsh.dev/openai-logos/PNGs/openai-logomark.png" })

@@ -135,6 +145,18 @@ const functions = {
             });
         }
     },
+    localLLM: {
+        async chat(prompt) {
+            try {
+                const chat = new Chat(strings.ai.localModel);
+                const response = await chat.send(prompt);
+                return response;
+            } catch (e) {
+                console.error(e);
+                return;
+            }
+        }
+    },
     async sendHeartbeat(url) {
         if (isDev) console.log("Heartbeat Sent: " + url);
         https.get(url, async (response) => {

@@ -15,14 +15,17 @@ module.exports = {
         await interaction.deferReply();
         await interaction.editReply(fn.builders.embeds.generatingResponse());
         const userPrompt = interaction.options.getString("prompt");
-        const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
-        const responseText = response.choices[0].message.content;
-        const usage = {
-            tokens: response.usage.total_tokens,
-            usdc: (response.usage.prompt_tokens * (strings.ai.chatPromptCentsPer / strings.ai.chatPromptUnits)) +
-                (response.usage.completion_tokens * (strings.ai.chatResCentsPer / strings.ai.chatResUnits))
-        };
-        const gptEmbed = fn.builders.embeds.gpt(userPrompt, responseText, usage);
-        await interaction.editReply(gptEmbed);
+        const response = await fn.localLLM.chat(userPrompt).catch(e => console.error(e));
+        const llmEmbed = fn.builders.embeds.llm(userPrompt, response.content);
+        await interaction.editReply(llmEmbed);
+        // const response = await fn.openAI.chatPrompt(userPrompt).catch(e => console.error(e));
+        // const responseText = response.choices[0].message.content;
+        // const usage = {
+        //     tokens: response.usage.total_tokens,
+        //     usdc: (response.usage.prompt_tokens * (strings.ai.chatPromptCentsPer / strings.ai.chatPromptUnits)) +
+        //         (response.usage.completion_tokens * (strings.ai.chatResCentsPer / strings.ai.chatResUnits))
+        // };
+        // const gptEmbed = fn.builders.embeds.gpt(userPrompt, responseText, usage);
+        // await interaction.editReply(gptEmbed);
     },
 };