6658 workflows add a first twenty piece email sender (#6965)

This commit is contained in:
martmull
2024-09-12 11:00:25 +02:00
committed by GitHub
parent f8e5b333d9
commit 3190f4a87b
397 changed files with 1143 additions and 1037 deletions

View File

@ -0,0 +1,5 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

/**
 * Contract for LLM chat-model drivers registered under the
 * LLM_CHAT_MODEL_DRIVER injection token (e.g. OpenAIDriver).
 */
export interface LLMChatModelDriver {
  // Returns a chat model configured to produce JSON output.
  getJSONChatModel(): BaseChatModel;
}

View File

@ -0,0 +1,22 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';

import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/drivers/interfaces/llm-prompt-template-driver.interface';

/**
 * LLMChatModelDriver implementation backed by OpenAI's chat API.
 *
 * The underlying model is bound to the `json_object` response format so
 * that getJSONChatModel() always yields JSON-mode completions.
 */
export class OpenAIDriver implements LLMChatModelDriver {
  private chatModel: BaseChatModel;

  constructor() {
    const baseModel = new ChatOpenAI({
      model: 'gpt-4o',
    });

    // bind() returns a Runnable rather than a BaseChatModel, so the double
    // cast is needed to satisfy the driver contract.
    this.chatModel = baseModel.bind({
      response_format: {
        type: 'json_object',
      },
    }) as unknown as BaseChatModel;
  }

  /** Chat model pre-configured for JSON-object responses. */
  getJSONChatModel() {
    return this.chatModel;
  }
}

View File

@ -0,0 +1,14 @@
import { ModuleMetadata, FactoryProvider } from '@nestjs/common';

/**
 * Identifies which concrete chat-model driver the module should build.
 * Values match the LLM_CHAT_MODEL_DRIVER environment variable.
 */
export enum LLMChatModelDriver {
  OpenAI = 'openai',
}

/** Resolved configuration for LLMChatModelModule. */
export interface LLMChatModelModuleOptions {
  type: LLMChatModelDriver;
}

/**
 * Async (forRoot-style) options: a factory producing the module options —
 * or undefined when no driver is configured — plus the standard Nest
 * `imports`/`inject` wiring for the factory's dependencies.
 */
export type LLMChatModelModuleAsyncOptions = {
  useFactory: (...args: any[]) => LLMChatModelModuleOptions | undefined;
} & Pick<ModuleMetadata, 'imports'> &
  Pick<FactoryProvider, 'inject'>;

View File

@ -0,0 +1 @@
// Injection token under which the concrete LLMChatModelDriver is provided.
export const LLM_CHAT_MODEL_DRIVER = Symbol('LLM_CHAT_MODEL_DRIVER');

View File

@ -0,0 +1,17 @@
import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/interfaces/llm-chat-model.interface';
import { EnvironmentService } from 'src/engine/core-modules/environment/environment.service';
export const llmChatModelModuleFactory = (
environmentService: EnvironmentService,
) => {
const driver = environmentService.get('LLM_CHAT_MODEL_DRIVER');
switch (driver) {
case LLMChatModelDriver.OpenAI: {
return { type: LLMChatModelDriver.OpenAI };
}
default:
// `No LLM chat model driver (${driver})`);
}
};

View File

@ -0,0 +1,35 @@
import { DynamicModule, Global } from '@nestjs/common';

import {
  LLMChatModelDriver,
  LLMChatModelModuleAsyncOptions,
} from 'src/engine/core-modules/llm-chat-model/interfaces/llm-chat-model.interface';
import { LLM_CHAT_MODEL_DRIVER } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.constants';
import { OpenAIDriver } from 'src/engine/core-modules/llm-chat-model/drivers/openai.driver';
import { LLMChatModelService } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.service';

/**
 * Globally-scoped dynamic module that wires a concrete chat-model driver
 * into DI under the LLM_CHAT_MODEL_DRIVER token and exposes
 * LLMChatModelService as its public surface.
 */
@Global()
export class LLMChatModelModule {
  static forRoot(options: LLMChatModelModuleAsyncOptions): DynamicModule {
    const driverProvider = {
      provide: LLM_CHAT_MODEL_DRIVER,
      useFactory: (...args: any[]) => {
        const config = options.useFactory(...args);

        // OpenAI is the only implemented driver; any other (or missing)
        // configuration yields an undefined provider value.
        if (config?.type === LLMChatModelDriver.OpenAI) {
          return new OpenAIDriver();
        }

        return undefined;
      },
      inject: options.inject || [],
    };

    return {
      module: LLMChatModelModule,
      providers: [LLMChatModelService, driverProvider],
      exports: [LLMChatModelService],
    };
  }
}

View File

@ -0,0 +1,16 @@
import { Injectable, Inject } from '@nestjs/common';

import { LLMChatModelDriver } from 'src/engine/core-modules/llm-chat-model/drivers/interfaces/llm-prompt-template-driver.interface';
import { LLM_CHAT_MODEL_DRIVER } from 'src/engine/core-modules/llm-chat-model/llm-chat-model.constants';

/**
 * Thin facade over whichever LLMChatModelDriver was registered for the
 * LLM_CHAT_MODEL_DRIVER token, decoupling consumers from the concrete
 * driver implementation.
 */
@Injectable()
export class LLMChatModelService {
  constructor(
    @Inject(LLM_CHAT_MODEL_DRIVER)
    private readonly chatModelDriver: LLMChatModelDriver,
  ) {}

  /** Delegates to the driver's JSON-mode chat model. */
  getJSONChatModel() {
    return this.chatModelDriver.getJSONChatModel();
  }
}