Update language-inference-engine.js

Namhyeon Go 2025-01-28 09:38:23 +09:00
parent 0485412b52
commit 47bb893c49


@@ -21,12 +21,13 @@ var biasMessage = "Write all future code examples in JavaScript ES3 using the ex
 var engineProfiles = {
     "openai": {
+        "defaultModel": "gpt-4o-mini",
         "headers": {
             "Content-Type": "application/json",
             "Authorization": "Bearer {apikey}"
         },
         "url": "https://api.openai.com/v1/chat/completions",
-        "wrap": function(model, message) {
+        "wrap": function(model, message, temperature) {
             return {
                 "model": model,
                 "messages": [{
@@ -35,7 +36,8 @@ var engineProfiles = {
                 }, {
                     "role": "user",
                     "content": message
-                }]
+                }],
+                "temperature": temperature
             };
         },
         "callback": function(response) {
@@ -51,13 +53,14 @@ var engineProfiles = {
         }
     },
     "anthropic": {
+        "defaultModel": "claude-3-5-sonnet-20241022",
         "headers": {
             "Content-Type": "application/json",
             "x-api-key": "{apikey}",
             "anthropic-version": "2023-06-01"
         },
         "url": "https://api.anthropic.com/v1/messages",
-        "wrap": function(model, message) {
+        "wrap": function(model, message, temperature) {
             return {
                 "model": model,
                 "max_tokens": 1024,
@@ -70,7 +73,8 @@ var engineProfiles = {
                         "role": "user",
                         "content": message
                     }
-                ]
+                ],
+                "temperature": temperature
             };
         },
         "callback": function(response) {
@@ -90,12 +94,13 @@ var engineProfiles = {
         }
     },
     "groq": {
+        "defaultModel": "llama-3.1-8b-instant",
         "headers": {
             "Content-Type": "application/json",
             "Authorization": "Bearer {apikey}"
         },
         "url": "https://api.groq.com/openai/v1/chat/completions",
-        "wrap": function(model, message) {
+        "wrap": function(model, message, temperature) {
             return {
                 "model": model,
                 "messages": [
@@ -107,7 +112,8 @@ var engineProfiles = {
                         "role": "user",
                         "content": message
                     }
-                ]
+                ],
+                "temperature": temperature
             };
         },
         "callback": function(response) {
@@ -123,13 +129,15 @@ var engineProfiles = {
         }
     },
     "xai": {
+        "defaultModel": "grok-2-latest",
         "headers": {
             "Content-Type": "application/json",
             "Authorization": "Bearer {apikey}"
         },
         "url": "https://api.x.ai/v1/chat/completions",
-        "wrap": function(model, message) {
+        "wrap": function(model, message, temperature) {
             return {
+                "model": model,
                 "messages": [
                     {
                         "role": "system",
@@ -140,7 +148,7 @@ var engineProfiles = {
                         "content": message
                     }
                 ],
-                "model": model
+                "temperature": temperature
             }
         },
         "callback": function(response) {
@@ -152,12 +160,13 @@ var engineProfiles = {
         }
     },
     "google": {
+        "defaultModel": "gemini-1.5-flash",
         "headers": {
             "Content-Type": "application/json",
             "Authorization": "Bearer {apikey}"
         },
         "url": "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apikey}",
-        "warp": function(model, message) {
+        "warp": function(model, message, temperature) {
             return {
                 "contents": [
                     {
@@ -189,12 +198,13 @@ var engineProfiles = {
         }
     },
     "deepseek": {
+        "defaultModel": "deepseek-chat",
         "headers": {
             "Content-Type": "application/json",
             "Authorization": "Bearer {apikey}"
         },
         "url": "https://api.deepseek.com/chat/completions",
-        "wrap": function(model, message) {
+        "wrap": function(model, message, temperature) {
             "model": model,
             "messages": [
                 {
@@ -206,6 +216,7 @@ var engineProfiles = {
                     "content": message
                 }
             ],
+            "temperature": temperature,
             "stream": false
         }
     },
@@ -225,6 +236,7 @@ var engineProfiles = {
 function LanguageInferenceEngine() {
     this.type = "llm"; // e.g. legacy (Legacy NLP), llm (LLM)
     this.provider = "";
+    this.model = "";
     this.engineProfile = null;
     this.setProvider = function(provider) {
@@ -232,6 +244,7 @@ function LanguageInferenceEngine() {
         if (provider in engineProfiles) {
             this.engineProfile = engineProfiles[provider];
+            this.model = this.engineProfile.defaultModel;
         }
         return this;
@@ -252,7 +265,7 @@ function LanguageInferenceEngine() {
         return this;
     }
-    this.inference = function(message) {
+    this.inference = function(message, temperature) {
         if (this.engineProfile == null)
             return this;
@@ -267,7 +280,7 @@ function LanguageInferenceEngine() {
             "apikey": apikey
         })
         .setHeaders(headers)
-        .setRequestBody(wrap(message))
+        .setRequestBody(wrap(this.model, message, temperature))
         .open("post", url)
         .send()
         .responseBody;
@@ -281,7 +294,7 @@ exports.create = function() {
     return new LanguageInferenceEngine();
 };
-exports.VERSIONINFO = "Language Inference Engine (NLP/LLM) integration version 0.1.1";
+exports.VERSIONINFO = "Language Inference Engine (NLP/LLM) integration version 0.1.2";
 exports.AUTHOR = "abuse@catswords.net";
 exports.global = global;
 exports.require = global.require;
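For context, a minimal usage sketch against the updated API. The require() path and the prompt are illustrative assumptions; only create(), setProvider(), and inference() are visible in this diff, and API-key handling is assumed to be configured elsewhere in the module.

// Hypothetical module path; adjust to the project's actual layout.
var engine = require("lib/language-inference-engine").create();

// setProvider() now also copies the provider's defaultModel into this.model
// (e.g. "gpt-4o-mini" for the "openai" profile).
engine.setProvider("openai");

// inference() now accepts a temperature argument, which is passed through
// wrap(this.model, message, temperature) into the request body.
engine.inference("Summarize this repository in one sentence.", 0.7);

Taking temperature per call keeps sampling behaviour out of the provider profiles, so a caller can reuse one engine instance with different temperatures.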