6658 workflows add a first twenty piece email sender (#6965)
This commit is contained in:
@ -0,0 +1,5 @@
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
|
||||
/**
 * Contract for chat-model drivers that expose a LangChain chat model
 * configured to emit JSON-formatted responses.
 */
export interface LLMChatModelDriver {
  /** Returns a chat model instance whose completions are JSON objects. */
  getJSONChatModel(): BaseChatModel;
}
|
||||
@ -0,0 +1,22 @@
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
|
||||
import { ChatOpenAI } from '@langchain/openai';
|
||||
|
||||
import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/drivers/interfaces/llm-prompt-template-driver.interface';
|
||||
|
||||
export class OpenAIDriver implements LLMChatModelDriver {
|
||||
private chatModel: BaseChatModel;
|
||||
|
||||
constructor() {
|
||||
this.chatModel = new ChatOpenAI({
|
||||
model: 'gpt-4o',
|
||||
}).bind({
|
||||
response_format: {
|
||||
type: 'json_object',
|
||||
},
|
||||
}) as unknown as BaseChatModel;
|
||||
}
|
||||
|
||||
getJSONChatModel() {
|
||||
return this.chatModel;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,14 @@
|
||||
import { ModuleMetadata, FactoryProvider } from '@nestjs/common';
|
||||
|
||||
/**
 * Supported LLM chat-model backends. Values match the strings expected in
 * the LLM_CHAT_MODEL_DRIVER environment variable.
 */
export enum LLMChatModelDriver {
  OpenAI = 'openai',
}
|
||||
|
||||
/** Resolved configuration for the LLM chat model module. */
export interface LLMChatModelModuleOptions {
  /** Which driver implementation to instantiate. */
  type: LLMChatModelDriver;
}
|
||||
|
||||
/**
 * Async configuration for LLMChatModelModule.forRoot: a factory (with
 * optional NestJS `imports`/`inject`) that resolves module options, or
 * `undefined` when no driver is configured.
 */
export type LLMChatModelModuleAsyncOptions = {
  // `any[]` matches the NestJS async-options convention; injected deps are
  // positional and untyped at this boundary.
  useFactory: (...args: any[]) => LLMChatModelModuleOptions | undefined;
} & Pick<ModuleMetadata, 'imports'> &
  Pick<FactoryProvider, 'inject'>;
|
||||
@ -0,0 +1 @@
|
||||
/** Injection token for the active LLMChatModelDriver implementation. */
export const LLM_CHAT_MODEL_DRIVER = Symbol('LLM_CHAT_MODEL_DRIVER');
|
||||
@ -0,0 +1,17 @@
|
||||
import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/interfaces/llm-chat-model.interface';
|
||||
|
||||
import { EnvironmentService } from 'src/engine/core-modules/environment/environment.service';
|
||||
|
||||
export const llmChatModelModuleFactory = (
|
||||
environmentService: EnvironmentService,
|
||||
) => {
|
||||
const driver = environmentService.get('LLM_CHAT_MODEL_DRIVER');
|
||||
|
||||
switch (driver) {
|
||||
case LLMChatModelDriver.OpenAI: {
|
||||
return { type: LLMChatModelDriver.OpenAI };
|
||||
}
|
||||
default:
|
||||
// `No LLM chat model driver (${driver})`);
|
||||
}
|
||||
};
|
||||
@ -0,0 +1,35 @@
|
||||
import { DynamicModule, Global } from '@nestjs/common';
|
||||
|
||||
import {
|
||||
LLMChatModelDriver,
|
||||
LLMChatModelModuleAsyncOptions,
|
||||
} from 'src/engine/core-modules/llm-chat-model/interfaces/llm-chat-model.interface';
|
||||
|
||||
import { LLM_CHAT_MODEL_DRIVER } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.constants';
|
||||
import { OpenAIDriver } from 'src/engine/core-modules/llm-chat-model/drivers/openai.driver';
|
||||
import { LLMChatModelService } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.service';
|
||||
|
||||
@Global()
|
||||
export class LLMChatModelModule {
|
||||
static forRoot(options: LLMChatModelModuleAsyncOptions): DynamicModule {
|
||||
const provider = {
|
||||
provide: LLM_CHAT_MODEL_DRIVER,
|
||||
useFactory: (...args: any[]) => {
|
||||
const config = options.useFactory(...args);
|
||||
|
||||
switch (config?.type) {
|
||||
case LLMChatModelDriver.OpenAI: {
|
||||
return new OpenAIDriver();
|
||||
}
|
||||
}
|
||||
},
|
||||
inject: options.inject || [],
|
||||
};
|
||||
|
||||
return {
|
||||
module: LLMChatModelModule,
|
||||
providers: [LLMChatModelService, provider],
|
||||
exports: [LLMChatModelService],
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,16 @@
|
||||
import { Injectable, Inject } from '@nestjs/common';
|
||||
|
||||
import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/drivers/interfaces/llm-prompt-template-driver.interface';
|
||||
|
||||
import { LLM_CHAT_MODEL_DRIVER } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.constants';
|
||||
|
||||
@Injectable()
|
||||
export class LLMChatModelService {
|
||||
constructor(
|
||||
@Inject(LLM_CHAT_MODEL_DRIVER) private driver: LLMChatModelDriver,
|
||||
) {}
|
||||
|
||||
getJSONChatModel() {
|
||||
return this.driver.getJSONChatModel();
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user