mirror of https://github.com/gnh1201/welsonjs.git
Update language-inference-engine.js

1. Fix typos (e.g., a missing return)
2. Add clovastudio LLM

parent 5b092c7f7a
commit 2251494a48
@@ -26,7 +26,13 @@ var ENGINE_PROFILES = {
     "openai": {
         "type": "llm",
         "availableModels": [
-            "gpt-4o-mini"
+            "gpt-4o-mini",
+            "gpt-4o",
+            "o1",
+            "o1-mini",
+            "o3-mini",
+            "gpt-4-turbo",
+            "gpt-3.5-turbo"
         ],
         "headers": {
             "Content-Type": "application/json",
@@ -167,7 +173,7 @@ var ENGINE_PROFILES = {
                 ],
                 "temperature": temperature,
                 "stream": false
-            }
+            };
         },
         "callback": function(response) {
             return response.choices.reduce(function(a, x) {
@@ -183,8 +189,7 @@ var ENGINE_PROFILES = {
             "gemini-1.5-flash"
         ],
         "headers": {
-            "Content-Type": "application/json",
-            "Authorization": "Bearer {apikey}"
+            "Content-Type": "application/json"
         },
         "url": "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apikey}",
         "warp": function(model, message, temperature) {
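A note on the hunk above: the generateContent URL already carries the key in the ?key={apikey} query parameter, so the Bearer Authorization header appears redundant for this endpoint and is dropped, along with the now-trailing comma after Content-Type. Below is a minimal sketch of how the placeholders would presumably be filled in; the variable names and key value are illustrative, not the module's own.

    // Illustrative only: substitute the profile's placeholders by hand.
    var apikey = "YOUR_GEMINI_API_KEY"; // placeholder value, not a real key
    var url = "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apikey}"
        .replace("{model}", "gemini-1.5-flash")
        .replace("{apikey}", apikey); // the key travels in the query string, so no Authorization header is needed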
@@ -198,7 +203,7 @@ var ENGINE_PROFILES = {
                         ]
                     }
                 ]
-            }
+            };
         },
         "callback": function(response) {
             if ("error" in response) {
@@ -225,19 +230,21 @@ var ENGINE_PROFILES = {
         ],
         "url": "https://api.mistral.ai/v1/chat/completions",
         "wrap": function(model, message, temperature) {
-            "model": model,
-            "messages": [
-                {
-                    "role": "system",
-                    "content": BIAS_MESSAGE
-                },
-                {
-                    "role": "user",
-                    "content": message
-                }
-            ],
-            "temperature": temperature,
-            "stream": false
+            return {
+                "model": model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": BIAS_MESSAGE
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "temperature": temperature,
+                "stream": false
+            };
         },
         "callback": function(response) {
             if ("error" in response) {
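The "missing return" typo named in the commit message is what this and the following deepseek, moonshot, and ai.catswords.net hunks fix: each wrap body previously opened with bare "model": model, lines instead of returning an object (not valid JavaScript as a statement), so the function could not hand a request payload back to the caller. A minimal standalone sketch of the corrected shape, assuming a placeholder BIAS_MESSAGE and model name; the real constant and model ids come from language-inference-engine.js itself:

    // Minimal sketch of the fixed wrap(): it now returns the chat request body.
    var BIAS_MESSAGE = "You are a helpful assistant."; // placeholder; the module defines its own
    var wrap = function(model, message, temperature) {
        return {
            "model": model,
            "messages": [
                { "role": "system", "content": BIAS_MESSAGE },
                { "role": "user", "content": message }
            ],
            "temperature": temperature,
            "stream": false
        };
    };

    // Now yields a usable JSON request body instead of nothing:
    var body = JSON.stringify(wrap("example-model", "Hello", 0.7));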
@@ -262,19 +269,21 @@ var ENGINE_PROFILES = {
         },
         "url": "https://api.deepseek.com/chat/completions",
         "wrap": function(model, message, temperature) {
-            "model": model,
-            "messages": [
-                {
-                    "role": "system",
-                    "content": BIAS_MESSAGE
-                },
-                {
-                    "role": "user",
-                    "content": message
-                }
-            ],
-            "temperature": temperature,
-            "stream": false
+            return {
+                "model": model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": BIAS_MESSAGE
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "temperature": temperature,
+                "stream": false
+            };
         },
         "callback": function(response) {
             if ("error" in response) {
@@ -299,19 +308,67 @@ var ENGINE_PROFILES = {
         },
         "url": "https://api.moonshot.cn/v1",
         "wrap": function(model, message, temperature) {
-            "model": model,
-            "messages": [
-                {
-                    "role": "system",
-                    "content": BIAS_MESSAGE
-                },
-                {
-                    "role": "user",
-                    "content": message
-                }
-            ],
-            "temperature": temperature,
-            "stream": false
+            return {
+                "model": model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": BIAS_MESSAGE
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "temperature": temperature,
+                "stream": false
+            };
+        },
+        "callback": function(response) {
+            if ("error" in response) {
+                return ["Error: " + response.error.message];
+            } else {
+                return response.choices.reduce(function(a, x) {
+                    a.push(x.message.content);
+
+                    return a;
+                }, []);
+            }
+        }
+    },
+    "clovastudio": {
+        "type": "llm",
+        "availableModels": [
+            "HCX-003",
+            "HCX-DASH-001"
+        ],
+        "headers": {
+            "Authorization": "Bearer {apikey}",
+            "NCP-CLOVASTUDIO-REQUEST-ID": "",
+            "Content-Type": "application/json",
+            "Accept": "application/json"
+        },
+        "url": "https://clovastudio.stream.ntruss.com/testapp/v1/chat-completions/{model}",
+        "wrap": function(model, message, temperature) {
+            return {
+                "topK": 0,
+                "includeAiFilters": true,
+                "maxTokens": 4096,
+                "temperature": temperature,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": BIAS_MESSAGE
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "stopBefore": [],
+                "repeatPenalty": 5.0,
+                "topP": 0.8
+            };
         },
         "callback": function(response) {
             if ("error" in response) {
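The new clovastudio profile targets NAVER CLOVA Studio's chat-completions endpoint and uses a different request schema than the OpenAI-style providers (topK/topP, maxTokens, repeatPenalty, stopBefore, and includeAiFilters instead of stream). A hand-assembled sketch of the request this profile describes, under the assumption that the engine substitutes {model} and {apikey} the same way as for the other profiles; all literal values below are illustrative placeholders:

    // Illustrative request assembled from the profile's url, headers and wrap() fields.
    var BIAS_MESSAGE = "You are a helpful assistant."; // placeholder; the module defines its own
    var model = "HCX-003";                             // or "HCX-DASH-001"
    var apikey = "YOUR_CLOVASTUDIO_API_KEY";           // placeholder value, not a real key

    var url = "https://clovastudio.stream.ntruss.com/testapp/v1/chat-completions/{model}"
        .replace("{model}", model);
    var headers = {
        "Authorization": "Bearer " + apikey,
        "NCP-CLOVASTUDIO-REQUEST-ID": "",
        "Content-Type": "application/json",
        "Accept": "application/json"
    };
    var payload = {   // what wrap(model, message, temperature) returns for this provider
        "topK": 0,
        "includeAiFilters": true,
        "maxTokens": 4096,
        "temperature": 0.7,
        "messages": [
            { "role": "system", "content": BIAS_MESSAGE },
            { "role": "user", "content": "Hello" }
        ],
        "stopBefore": [],
        "repeatPenalty": 5.0,
        "topP": 0.8
    };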
@@ -341,19 +398,21 @@ var ENGINE_PROFILES = {
         },
         "url": "https://ai.catswords.net",
         "wrap": function(model, message, temperature) {
-            "model": model,
-            "messages": [
-                {
-                    "role": "system",
-                    "content": BIAS_MESSAGE
-                },
-                {
-                    "role": "user",
-                    "content": message
-                }
-            ],
-            "temperature": temperature,
-            "stream": false
+            return {
+                "model": model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": BIAS_MESSAGE
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "temperature": temperature,
+                "stream": false
+            };
         },
         "callback": function(response) {
             if ("error" in response) {
@@ -370,7 +429,7 @@ var ENGINE_PROFILES = {
 };
 
 function LanguageInferenceEngine() {
-    this.type = ""; // e.g. legacy (Legacy NLP), llm (LLM)
+    this.type = ""; // e.g. legacy (Legacy NLP), LLM based AI (LLM)
     this.provider = "";
     this.model = "";
     this.engineProfile = null;
@@ -435,7 +494,7 @@ exports.create = function() {
     return new LanguageInferenceEngine();
 };
 
-exports.VERSIONINFO = "Language Inference Engine integration version 0.1.8";
+exports.VERSIONINFO = "Language Inference Engine integration version 0.1.9";
 exports.AUTHOR = "abuse@catswords.net";
 exports.global = global;
 exports.require = global.require;