diff --git a/lib/language-inference-engine.js b/lib/language-inference-engine.js
index bc1fcf3..1f217e5 100644
--- a/lib/language-inference-engine.js
+++ b/lib/language-inference-engine.js
@@ -38,7 +38,8 @@ var engineProfiles = {
                     "role": "user",
                     "content": message
                 }],
-                "temperature": temperature
+                "temperature": temperature,
+                "stream": false
             };
         },
         "callback": function(response) {
@@ -75,7 +76,8 @@ var engineProfiles = {
                         "content": message
                     }
                 ],
-                "temperature": temperature
+                "temperature": temperature,
+                "stream": false
             };
         },
         "callback": function(response) {
@@ -115,7 +117,8 @@ var engineProfiles = {
                         "content": message
                     }
                 ],
-                "temperature": temperature
+                "temperature": temperature,
+                "stream": false
             };
         },
         "callback": function(response) {
@@ -151,7 +154,8 @@ var engineProfiles = {
                         "content": message
                     }
                 ],
-                "temperature": temperature
+                "temperature": temperature,
+                "stream": false
             }
         },
         "callback": function(response) {
@@ -201,6 +205,39 @@ var engineProfiles = {
             }
         }
     },
+    "mistral": {
+        "type": "llm",
+        "defaultModel": "ministral-8b-latest",
+        "url": "https://api.mistral.ai/v1/chat/completions",
+        "wrap": function(model, message, temperature) {
+            return {
+                "model": model,
+                "messages": [
+                    {
+                        "role": "system",
+                        "content": biasMessage
+                    },
+                    {
+                        "role": "user",
+                        "content": message
+                    }
+                ],
+                "temperature": temperature,
+                "stream": false
+            };
+        },
+        "callback": function(response) {
+            if ("error" in response) {
+                return ["Error: " + response.error.message];
+            } else {
+                return response.choices.reduce(function(a, x) {
+                    a.push(x.message.content);
+
+                    return a;
+                }, []);
+            }
+        }
+    },
     "deepseek": {
         "type": "llm",
         "defaultModel": "deepseek-chat",
@@ -223,17 +260,17 @@ var engineProfiles = {
                 ],
                 "temperature": temperature,
                 "stream": false
-                }
-            },
-            "callback": function(response) {
-                if ("error" in response) {
-                    return ["Error: " + response.error.message];
-                } else {
-                    return response.choices.reduce(function(a, x) {
-                        a.push(x.message.content);
-
-                        return a;
-                    }, []);
+        },
+        "callback": function(response) {
+            if ("error" in response) {
+                return ["Error: " + response.error.message];
+            } else {
+                return response.choices.reduce(function(a, x) {
+                    a.push(x.message.content);
+
+                    return a;
+                }, []);
+            }
         }
     }
 };
@@ -299,7 +336,7 @@ exports.create = function() {
     return new LanguageInferenceEngine();
 };
 
-exports.VERSIONINFO = "Language Inference Engine (NLP/LLM) integration version 0.1.2";
+exports.VERSIONINFO = "Language Inference Engine integration version 0.1.3";
 exports.AUTHOR = "abuse@catswords.net";
 exports.global = global;
 exports.require = global.require;
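
For reference, a minimal sketch of how the new "mistral" profile's wrap/callback pair behaves. engineProfiles and biasMessage are module-internal names (the module only exports create and the metadata fields shown above), so this assumes the snippet is dropped inside lib/language-inference-engine.js purely as a quick local check, not that such a test exists in the repository.

    // Hypothetical smoke test for the added "mistral" engine profile.
    var profile = engineProfiles["mistral"];

    // wrap() builds the non-streaming chat-completions request body.
    var body = profile.wrap(profile.defaultModel, "Say hello in Korean.", 0.7);
    // body.model  -> "ministral-8b-latest"
    // body.stream -> false

    // callback() flattens a (mocked) success response into an array of message strings.
    var ok = profile.callback({
        "choices": [
            { "message": { "content": "안녕하세요" } }
        ]
    });  // -> ["안녕하세요"]

    // An error payload is mapped to a single "Error: ..." string.
    var failed = profile.callback({
        "error": { "message": "Unauthorized" }
    });  // -> ["Error: Unauthorized"]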