// language-inference-engine.js
// Language Inference Engine (e.g., NLP, LLM) services integration
// Namhyeon Go <abuse@catswords.net>
// https://github.com/gnh1201/welsonjs
// ***SECURITY NOTICE***
// Language Inference Engine requires an internet connection, and data may be transmitted externally. Users must adhere to the terms of use and privacy policy.
// - OpenAI: https://openai.com/policies/row-privacy-policy/
// - Anthropic: https://www.anthropic.com/legal/privacy
// - Groq: https://groq.com/privacy-policy/
// - Mistral: https://mistral.ai/terms/
// - xAI: https://x.ai/legal/privacy-policy
// - Google Gemini: https://developers.google.com/idx/support/privacy
// - DeepSeek: https://chat.deepseek.com/downloads/DeepSeek%20Privacy%20Policy.html
// - Moonshot: https://kimi.moonshot.cn/user/agreement/userPrivacy
//
var HTTP = require ( "lib/http" ) ;
var CRED = require ( "lib/credentials" ) ;
// System ("bias") prompt prepended to every request so the model answers in
// WelsonJS conventions. Replace it at runtime via exports.setBiasMessage().
var BIAS_MESSAGE = [
    "Write all future code examples in JavaScript ES3 using the exports variable. ",
    "Include a test method with the fixed name test. ",
    "Respond exclusively in code without blocks."
].join("");
// Engine profiles, one per provider. Each profile declares:
//   type            - engine category ("llm" here; "legacy" reserved for classic NLP)
//   availableModels - models known to work; index 0 is the default model
//   headers         - HTTP headers; "{apikey}" is substituted at request time
//   url             - endpoint URL
//   wrap(model, message, temperature) - builds the JSON request body
//   callback(response)                - extracts reply texts as an array of strings
var ENGINE_PROFILES = {
    "openai": {
        "type": "llm",
        "availableModels": [
            "gpt-4o-mini"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.openai.com/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "developer", // OpenAI's replacement for "system" on newer models
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "anthropic": {
        "type": "llm",
        "availableModels": [
            "claude-3-5-sonnet-20241022"
        ],
        "headers": {
            "Content-Type": "application/json",
            "x-api-key": "{apikey}",
            "anthropic-version": "2023-06-01"
        },
        "url": "https://api.anthropic.com/v1/messages",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                // FIX: the Anthropic Messages API takes the system prompt as a
                // top-level "system" field; a {"role": "system"} message is rejected.
                "system": BIAS_MESSAGE,
                // FIX: "max_tokens" is a required field on /v1/messages.
                "max_tokens": 4096,
                "messages": [{
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.content.reduce(function(a, x) {
                    if (x.type == "text") {
                        a.push(x.text);
                    } else {
                        a.push("Not supported type: " + x.type);
                    }
                    return a;
                }, []);
            }
        }
    },
    "groq": {
        "type": "llm",
        "availableModels": [
            "llama-3.1-8b-instant"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.groq.com/openai/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "xai": {
        "type": "llm",
        "availableModels": [
            "grok-2-latest"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.x.ai/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            // FIX: guard against an error payload, as every sibling profile does
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "google": {
        "type": "llm",
        "availableModels": [
            "gemini-1.5-flash"
        ],
        "headers": {
            "Content-Type": "application/json",
            // NOTE(review): the API key already travels in the URL ("key={apikey}");
            // this Bearer header looks redundant - confirm before removing.
            "Authorization": "Bearer {apikey}"
        },
        // NOTE: "{model}" and "{apikey}" are substituted by lib/http variables
        // (see LanguageInferenceEngine.inference) before the request is sent.
        "url": "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apikey}",
        // FIX: this key was misspelled "warp"; inference() looks up "wrap",
        // so the Google profile could never build a request body.
        "wrap": function(model, message, temperature) {
            return {
                "contents": [{
                    "parts": [{
                        "text": message
                    }]
                }],
                // FIX: forward the temperature (Gemini nests it under generationConfig)
                "generationConfig": {
                    "temperature": temperature
                }
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.candidates.reduce(function(a, x) {
                    x.content.parts.forEach(function(part) {
                        if ("text" in part) {
                            a.push(part.text);
                        } else {
                            a.push("Not supported type");
                        }
                    });
                    return a;
                }, []);
            }
        }
    },
    "mistral": {
        "type": "llm",
        "availableModels": [
            "ministral-8b-latest"
        ],
        // FIX: headers were missing entirely, so inference() sent
        // setHeaders(undefined); Mistral uses Bearer auth like the other
        // OpenAI-compatible providers.
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.mistral.ai/v1/chat/completions",
        // FIX: the request object was never returned (bare object-literal
        // statements inside the function body - a syntax error as written)
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "deepseek": {
        "type": "llm",
        "availableModels": [
            "deepseek-chat"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.deepseek.com/chat/completions",
        // FIX: the request object was never returned (syntax error as written)
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "moonshot": {
        "type": "llm",
        "availableModels": [
            "moonshot-v1-8k"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        // FIX: the base URL lacked the chat-completions path; POSTing to
        // the bare "/v1" root cannot reach the endpoint.
        "url": "https://api.moonshot.cn/v1/chat/completions",
        // FIX: the request object was never returned (syntax error as written)
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "catswords-ai": {
        "type": "llm",
        "availableModels": [
            "openchat-3.5-0106",
            "qwen1.5-14b-chat-awq",
            "gemma-7b-it",
            "una-cybertron-7b-v2-bf16",
            "starling-lm-7b-beta",
            "hermes-2-pro-mistral-7b"
        ],
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://ai.catswords.net",
        // FIX: the request object was never returned (syntax error as written)
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "system",
                    "content": BIAS_MESSAGE
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    }
};
// Chat-style inference client. Typical use:
//   require("lib/language-inference-engine").create()
//       .setProvider("openai")
//       .inference("Hello", 0.7);
function LanguageInferenceEngine() {
    this.type = ""; // e.g. legacy (Legacy NLP), llm (LLM)
    this.provider = "";
    this.model = "";
    this.engineProfile = null;

    // Select a provider from ENGINE_PROFILES and default to its first model.
    // An unknown provider name leaves the instance unconfigured (no throw).
    this.setProvider = function(provider) {
        this.provider = provider;

        if (this.provider in ENGINE_PROFILES) {
            this.engineProfile = ENGINE_PROFILES[provider];
            this.type = this.engineProfile.type;
            this.model = this.engineProfile.availableModels[0];
        }

        return this;
    };

    this.setModel = function(model) {
        this.model = model;
        return this;
    };

    // Override the endpoint URL (e.g. for a self-hosted compatible API).
    // NOTE(review): this mutates the shared profile object in ENGINE_PROFILES,
    // so it affects every instance using the same provider.
    this.setEngineProfileURL = function(url) {
        if (this.engineProfile == null)
            return this;

        this.engineProfile.url = url;
        return this;
    };

    // Send one message and return the provider's reply texts as an array of
    // strings (or an ["Error: ..."] array). Returns `this` unchanged when no
    // provider is configured.
    this.inference = function(message, temperature) {
        if (this.engineProfile == null)
            return this;

        var apikey = CRED.get("apikey", this.provider); // Get API key
        var headers = this.engineProfile.headers;
        var wrap = this.engineProfile.wrap;
        var url = this.engineProfile.url;
        var callback = this.engineProfile.callback;

        var response = HTTP.create("MSXML")
            .setVariables({
                "apikey": apikey,
                // FIX: also substitute "{model}" - the Google profile embeds
                // the model name in its URL, which was never filled in before.
                "model": this.model
            })
            .setHeaders(headers)
            .setRequestBody(wrap(this.model, message, temperature))
            .open("post", url)
            .send()
            .responseBody;

        return callback(response);
    };
}
2025-01-29 09:24:32 +00:00
exports . setBiasMessage = function ( biasMessage ) {
BIAS _MESSAGE = biasMessage ;
} ;
2025-01-27 14:48:34 +00:00
exports . LanguageInferenceEngine = LanguageInferenceEngine ;
exports . create = function ( ) {
return new LanguageInferenceEngine ( ) ;
} ;
2025-01-30 14:33:35 +00:00
exports . VERSIONINFO = "Language Inference Engine integration version 0.1.8" ;
2025-01-27 14:48:34 +00:00
exports . AUTHOR = "abuse@catswords.net" ;
exports . global = global ;
exports . require = global . require ;