Files
twenty_crm/packages/twenty-server/src/engine/integrations/llm-tracing/llm-tracing.module.ts
ad-elias 4c642a0bb8 Text-to-SQL proof of concept (#5788)
Added:
- An "Ask AI" command to the command menu.
- A simple GraphQL resolver that converts the user's question into a
relevant SQL query using an LLM, runs the query, and returns the result.

<img width="428" alt="Screenshot 2024-06-09 at 20 53 09"
src="https://github.com/twentyhq/twenty/assets/171685816/57127f37-d4a6-498d-b253-733ffa0d209f">

No security concerns have been addressed, this is only a
proof-of-concept and not intended to be enabled in production.

All changes are behind a feature flag called `IS_ASK_AI_ENABLED`.

---------

Co-authored-by: Félix Malfait <felix.malfait@gmail.com>
2024-07-04 08:57:26 +02:00

40 lines
1.3 KiB
TypeScript

import { Global, DynamicModule } from '@nestjs/common';
import {
LLMTracingModuleAsyncOptions,
LLMTracingDriver,
} from 'src/engine/integrations/llm-tracing/interfaces/llm-tracing.interface';
import { LangfuseDriver } from 'src/engine/integrations/llm-tracing/drivers/langfuse.driver';
import { ConsoleDriver } from 'src/engine/integrations/llm-tracing/drivers/console.driver';
import { LLMTracingService } from 'src/engine/integrations/llm-tracing/llm-tracing.service';
import { LLM_TRACING_DRIVER } from 'src/engine/integrations/llm-tracing/llm-tracing.constants';
@Global()
export class LLMTracingModule {
  /**
   * Registers the LLM tracing driver globally so LLMTracingService can be
   * injected anywhere without re-importing this module.
   *
   * @param options async options whose `useFactory` resolves the driver
   *                config (type + driver-specific options) at bootstrap.
   * @returns a DynamicModule exposing LLMTracingService.
   * @throws Error at bootstrap if the resolved config names an unknown
   *         driver type.
   */
  static forRoot(options: LLMTracingModuleAsyncOptions): DynamicModule {
    const provider = {
      provide: LLM_TRACING_DRIVER,
      useFactory: (...args: any[]) => {
        const config = options.useFactory(...args);

        switch (config.type) {
          case LLMTracingDriver.Langfuse: {
            return new LangfuseDriver(config.options);
          }
          case LLMTracingDriver.Console: {
            return new ConsoleDriver();
          }
          default: {
            // Fail fast at bootstrap instead of silently injecting
            // `undefined` as the driver, which would only surface later
            // as a confusing runtime error inside LLMTracingService.
            throw new Error(
              `Unknown LLM tracing driver type: ${String(
                (config as { type: unknown }).type,
              )}`,
            );
          }
        }
      },
      inject: options.inject || [],
    };

    return {
      module: LLMTracingModule,
      providers: [LLMTracingService, provider],
      exports: [LLMTracingService],
    };
  }
}