2025-01-27 14:48:34 +00:00
// language-inference-engine.js
// Language Inference Engine (e.g., NLP, LLM) services integration
// Namhyeon Go <abuse@catswords.net>
// https://github.com/gnh1201/welsonjs
// ***SECURITY NOTICE***
// Language Inference Engine requires an internet connection, and data may be transmitted externally. Users must adhere to the terms of use and privacy policy.
// - OpenAI: https://openai.com/policies/row-privacy-policy/
// - Anthropic: https://www.anthropic.com/legal/privacy
// - Groq: https://groq.com/privacy-policy/
// - xAI: https://x.ai/legal/privacy-policy
2025-01-27 15:02:22 +00:00
// - Google Gemini: https://developers.google.com/idx/support/privacy
2025-01-27 14:48:34 +00:00
// - DeepSeek: https://chat.deepseek.com/downloads/DeepSeek%20Privacy%20Policy.html
2025-01-27 15:02:22 +00:00
//
2025-01-27 14:48:34 +00:00
var HTTP = require ( "lib/http" ) ;
var CRED = require ( "lib/credentials" ) ;
// System/developer prompt prepended to every request. It biases the model
// toward the WelsonJS house style: ES3 syntax, exports-based modules, a
// test() entry point, and code-only (no prose, no fenced blocks) replies.
var biasMessage = [
    "Write all future code examples in JavaScript ES3 using the exports variable.",
    "Include a test method with the fixed name test.",
    "Respond exclusively in code without blocks."
].join(" ");
// Profile table for every supported inference provider.
// Each profile entry defines:
//   type         - engine category: "llm" (or "legacy" for classic NLP)
//   defaultModel - model used when setModel() is not called
//   headers      - HTTP headers; "{apikey}" is substituted by lib/http setVariables()
//   url          - endpoint URL; may contain "{apikey}"/"{model}" placeholders
//   wrap         - function(model, message, temperature) -> request body object
//   callback     - function(response) -> array of response text strings
var engineProfiles = {
    "openai": {
        "type": "llm",
        "defaultModel": "gpt-4o-mini",
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.openai.com/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [{
                    "role": "developer", // "developer" supersedes "system" on newer OpenAI models
                    "content": biasMessage
                }, {
                    "role": "user",
                    "content": message
                }],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "anthropic": {
        "type": "llm",
        "defaultModel": "claude-3-5-sonnet-20241022",
        "headers": {
            "Content-Type": "application/json",
            "x-api-key": "{apikey}",
            "anthropic-version": "2023-06-01"
        },
        "url": "https://api.anthropic.com/v1/messages",
        "wrap": function(model, message, temperature) {
            // Fixed: the Anthropic Messages API rejects a "system" role inside
            // "messages" -- the system prompt belongs in the top-level "system"
            // field -- and "max_tokens" is a required parameter.
            return {
                "model": model,
                "max_tokens": 4096,
                "system": biasMessage,
                "messages": [
                    {
                        "role": "user",
                        "content": message
                    }
                ],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.content.reduce(function(a, x) {
                    if (x.type == "text") {
                        a.push(x.text);
                    } else {
                        a.push("Not supported type: " + x.type);
                    }
                    return a;
                }, []);
            }
        }
    },
    "groq": {
        "type": "llm",
        "defaultModel": "llama-3.1-8b-instant",
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.groq.com/openai/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [
                    {
                        "role": "system",
                        "content": biasMessage
                    },
                    {
                        "role": "user",
                        "content": message
                    }
                ],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "xai": {
        "type": "llm",
        "defaultModel": "grok-2-latest",
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.x.ai/v1/chat/completions",
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [
                    {
                        "role": "system",
                        "content": biasMessage
                    },
                    {
                        "role": "user",
                        "content": message
                    }
                ],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            // Fixed: added the error guard for consistency with every other
            // profile; previously an API error would crash on response.choices.
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "google": {
        "type": "llm",
        "defaultModel": "gemini-1.5-flash",
        // Fixed: the API key is already passed via the "?key={apikey}" query
        // parameter; an "Authorization: Bearer <api key>" header is not a valid
        // OAuth token and causes the request to be rejected, so it is omitted.
        "headers": {
            "Content-Type": "application/json"
        },
        "url": "https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apikey}",
        // Fixed: property was misspelled "warp", so inference() could never
        // find this profile's request builder.
        "wrap": function(model, message, temperature) {
            return {
                "contents": [
                    {
                        "parts": [
                            {
                                "text": message
                            }
                        ]
                    }
                ],
                // Fixed: the temperature argument was previously ignored.
                "generationConfig": {
                    "temperature": temperature
                }
                // NOTE(review): biasMessage could be sent via systemInstruction
                // on v1beta -- confirm model support before enabling.
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.candidates.reduce(function(a, x) {
                    x.content.parts.forEach(function(part) {
                        if ("text" in part) {
                            a.push(part.text);
                        } else {
                            a.push("Not supported type");
                        }
                    });
                    return a;
                }, []);
            }
        }
    },
    "mistral": {
        "type": "llm",
        "defaultModel": "ministral-8b-latest",
        // Fixed: the profile had no "headers" entry at all, so the request
        // carried neither a content type nor the API key.
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.mistral.ai/v1/chat/completions",
        // Fixed: the function body was missing "return {" and "};",
        // which is a syntax error (string literals cannot be labels).
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [
                    {
                        "role": "system",
                        "content": biasMessage
                    },
                    {
                        "role": "user",
                        "content": message
                    }
                ],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    },
    "deepseek": {
        "type": "llm",
        "defaultModel": "deepseek-chat",
        "headers": {
            "Content-Type": "application/json",
            "Authorization": "Bearer {apikey}"
        },
        "url": "https://api.deepseek.com/chat/completions",
        // Fixed: the function body was missing "return {" and "};",
        // which is a syntax error (string literals cannot be labels).
        "wrap": function(model, message, temperature) {
            return {
                "model": model,
                "messages": [
                    {
                        "role": "system",
                        "content": biasMessage
                    },
                    {
                        "role": "user",
                        "content": message
                    }
                ],
                "temperature": temperature,
                "stream": false
            };
        },
        "callback": function(response) {
            if ("error" in response) {
                return ["Error: " + response.error.message];
            } else {
                return response.choices.reduce(function(a, x) {
                    a.push(x.message.content);
                    return a;
                }, []);
            }
        }
    }
};
// Stateful, chainable wrapper around one entry of engineProfiles.
// Typical use: create().setProvider("openai").inference("hello", 0.7)
function LanguageInferenceEngine() {
    this.type = ""; // e.g. legacy (Legacy NLP), llm (LLM)
    this.provider = "";
    this.model = "";
    this.engineProfile = null;

    // Select a provider by name (a key of engineProfiles). An unknown name
    // records the provider string but leaves the profile unset. Chainable.
    this.setProvider = function(provider) {
        this.provider = provider;
        if (provider in engineProfiles) {
            this.engineProfile = engineProfiles[provider];
            this.type = this.engineProfile.type;
            this.model = this.engineProfile.defaultModel;
        }
        return this;
    };

    // Override the model name chosen by setProvider(). Chainable.
    this.setModel = function(model) {
        this.model = model;
        return this;
    };

    // Override the endpoint URL of the active profile. Chainable.
    // NOTE(review): this mutates the shared engineProfiles entry, so the new
    // URL affects every instance using the same provider -- confirm intended.
    this.setEngineProfileURL = function(url) {
        if (this.engineProfile == null)
            return this;
        this.engineProfile.url = url;
        return this;
    };

    // Send one message to the configured provider and return the decoded
    // array of response texts (see the profile's callback). Returns `this`
    // unchanged when no provider has been selected (kept for compatibility).
    this.inference = function(message, temperature) {
        if (this.engineProfile == null)
            return this;

        var apikey = CRED.get("apikey", this.provider); // Get API key
        var headers = this.engineProfile.headers;
        var wrap = this.engineProfile.wrap;
        var url = this.engineProfile.url;
        var callback = this.engineProfile.callback;

        var response = HTTP.create("MSXML")
            .setVariables({
                "apikey": apikey,
                // Fixed: the "{model}" placeholder (used by the Google profile
                // URL) was never substituted because only "apikey" was supplied.
                "model": this.model
            })
            .setHeaders(headers)
            .setRequestBody(wrap(this.model, message, temperature))
            .open("post", url)
            .send()
            .responseBody;

        return callback(response);
    };
}
// Public API of this module (WelsonJS exports convention).
function createInstance() {
    return new LanguageInferenceEngine();
}

exports.LanguageInferenceEngine = LanguageInferenceEngine;
exports.create = createInstance;

// Module metadata.
exports.VERSIONINFO = "Language Inference Engine integration version 0.1.3";
exports.AUTHOR = "abuse@catswords.net";
exports.global = global;
exports.require = global.require;