Hotfix tests after AI PR (#6124)
I merged https://github.com/twentyhq/twenty/pull/5788 too quickly and didn't notice that it was broken by https://github.com/twentyhq/twenty/pull/6069
This commit is contained in:
@ -371,7 +371,7 @@ export class EnvironmentVariables {
|
||||
|
||||
OPENROUTER_API_KEY: string;
|
||||
|
||||
LLM_CHAT_MODEL_DRIVER: LLMChatModelDriver = LLMChatModelDriver.OpenAI;
|
||||
LLM_CHAT_MODEL_DRIVER: LLMChatModelDriver;
|
||||
|
||||
OPENAI_API_KEY: string;
|
||||
|
||||
|
||||
@ -9,6 +9,6 @@ export interface LLMChatModelModuleOptions {
|
||||
}
|
||||
|
||||
export type LLMChatModelModuleAsyncOptions = {
|
||||
useFactory: (...args: any[]) => LLMChatModelModuleOptions;
|
||||
useFactory: (...args: any[]) => LLMChatModelModuleOptions | undefined;
|
||||
} & Pick<ModuleMetadata, 'imports'> &
|
||||
Pick<FactoryProvider, 'inject'>;
|
||||
|
||||
@ -12,8 +12,6 @@ export const llmChatModelModuleFactory = (
|
||||
return { type: LLMChatModelDriver.OpenAI };
|
||||
}
|
||||
default:
|
||||
throw new Error(
|
||||
`Invalid LLM chat model driver (${driver}), check your .env file`,
|
||||
);
|
||||
// `No LLM chat model driver (${driver})`);
|
||||
}
|
||||
};
|
||||
|
||||
@ -17,7 +17,7 @@ export class LLMChatModelModule {
|
||||
useFactory: (...args: any[]) => {
|
||||
const config = options.useFactory(...args);
|
||||
|
||||
switch (config.type) {
|
||||
switch (config?.type) {
|
||||
case LLMChatModelDriver.OpenAI: {
|
||||
return new OpenAIDriver();
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user