feat: add integration tests (#6923)
### Summary This PR introduces several integration tests, a mix of manually written tests and those generated using the `generate-integration-tests` Python script located in the `scripts` folder. ### Tests Added: - **Authentication tests**: Validating login, registration, and token handling. - **FindMany queries**: Fetching multiple records for all existing entities that do not require input arguments. ### How the Integration Tests Work: - A `setupTest` function is called during the Jest test run. This function initializes a test instance of the application and exposes it on a dedicated port. - Since tests are executed in isolated workers, they do not have direct access to the in-memory app instance. Instead, the tests query the application through the exposed port. - A static access token is used; it has a long expiration time (365 years), so it will effectively never expire. - The queries are executed, and the results are validated against expected outcomes. ### Current State and Next Steps: - These tests currently run using the existing development seed data. We plan to introduce more comprehensive test data using `faker` to improve coverage. - At the moment, the only mutation tests implemented are for authentication. Future updates should include broader mutation testing for other entities. --------- Co-authored-by: Charles Bochet <charles@twenty.com>
This commit is contained in:
@ -0,0 +1,213 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as process from 'process';
|
||||
|
||||
import { INTROSPECTION_QUERY } from './introspection-query';
|
||||
import {
|
||||
Field,
|
||||
InputValue,
|
||||
IntrospectionResponse,
|
||||
TypeRef,
|
||||
} from './introspection.interface';
|
||||
|
||||
const GRAPHQL_URL = 'http://localhost:3000/graphql';
|
||||
const BEARER_TOKEN =
|
||||
'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIyMDIwMjAyMC05ZTNiLTQ2ZDQtYTU1Ni04OGI5ZGRjMmIwMzQiLCJ3b3Jrc3BhY2VJZCI6IjIwMjAyMDIwLTFjMjUtNGQwMi1iZjI1LTZhZWNjZjdlYTQxOSIsIndvcmtzcGFjZU1lbWJlcklkIjoiMjAyMDIwMjAtMDY4Ny00YzQxLWI3MDctZWQxYmZjYTk3MmE3IiwiaWF0IjoxNzI2NDkyNTAyLCJleHAiOjEzMjQ1MDE2NTAyfQ.zM6TbfeOqYVH5Sgryc2zf02hd9uqUOSL1-iJlMgwzsI';
|
||||
const TEST_OUTPUT_DIR = './test';
|
||||
|
||||
const fetchGraphQLSchema = async (): Promise<IntrospectionResponse> => {
|
||||
const headers = {
|
||||
Authorization: BEARER_TOKEN,
|
||||
'Content-Type': 'application/json',
|
||||
};
|
||||
const response = await fetch(GRAPHQL_URL, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({ query: INTROSPECTION_QUERY }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch schema: ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
};
|
||||
|
||||
const toKebabCase = (name: string): string => {
|
||||
return name.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
|
||||
};
|
||||
|
||||
const unwrapType = (typeInfo: TypeRef): any => {
|
||||
while (typeInfo.ofType) {
|
||||
typeInfo = typeInfo.ofType;
|
||||
}
|
||||
|
||||
return typeInfo;
|
||||
};
|
||||
|
||||
const hasRequiredArgs = (args: InputValue[]): boolean => {
|
||||
return args.some((arg) => unwrapType(arg.type).kind === 'NON_NULL');
|
||||
};
|
||||
|
||||
const generateTestContent = (
|
||||
queryName: string,
|
||||
fields: Field[],
|
||||
): string | null => {
|
||||
const fieldNames = fields
|
||||
.filter((f) => ['SCALAR', 'ENUM'].includes(unwrapType(f.type).kind))
|
||||
.map((f) => f.name);
|
||||
|
||||
if (fieldNames.length === 0) {
|
||||
console.log(`Skipping ${queryName}: No usable fields found.`);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
const fieldSelection = fieldNames.join('\n ');
|
||||
const expectSelection = fieldNames
|
||||
.map((f) => `expect(${queryName}).toHaveProperty('${f}');`)
|
||||
.join('\n ');
|
||||
|
||||
return `import request from 'supertest';
|
||||
|
||||
const client = request(\`http://localhost:\${APP_PORT}\`);
|
||||
|
||||
describe('${queryName}Resolver (e2e)', () => {
|
||||
it('should find many ${queryName}', () => {
|
||||
const queryData = {
|
||||
query: \`
|
||||
query ${queryName} {
|
||||
${queryName} {
|
||||
edges {
|
||||
node {
|
||||
${fieldSelection}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
\`,
|
||||
};
|
||||
|
||||
return client
|
||||
.post('/graphql')
|
||||
.set('Authorization', \`Bearer \${ACCESS_TOKEN}\`)
|
||||
.send(queryData)
|
||||
.expect(200)
|
||||
.expect((res) => {
|
||||
expect(res.body.data).toBeDefined();
|
||||
expect(res.body.errors).toBeUndefined();
|
||||
})
|
||||
.expect((res) => {
|
||||
const data = res.body.data.${queryName};
|
||||
|
||||
expect(data).toBeDefined();
|
||||
expect(Array.isArray(data.edges)).toBe(true);
|
||||
|
||||
const edges = data.edges;
|
||||
|
||||
if (edges.length > 0) {
|
||||
const ${queryName} = edges[0].node;
|
||||
|
||||
${expectSelection}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
`;
|
||||
};
|
||||
|
||||
const writeTestFile = (
|
||||
queryName: string,
|
||||
content: string | null,
|
||||
force = false,
|
||||
): string => {
|
||||
if (!content) return 'skipped';
|
||||
|
||||
const fileName = `${toKebabCase(queryName)}.integration-spec.ts`;
|
||||
const filePath = path.join(TEST_OUTPUT_DIR, fileName);
|
||||
|
||||
if (fs.existsSync(filePath) && !force) {
|
||||
return 'skipped';
|
||||
}
|
||||
|
||||
fs.writeFileSync(filePath, content);
|
||||
|
||||
return force ? 'updated' : 'created';
|
||||
};
|
||||
|
||||
const generateTests = async (force = false) => {
|
||||
fs.mkdirSync(TEST_OUTPUT_DIR, { recursive: true });
|
||||
const schemaData = await fetchGraphQLSchema();
|
||||
const types = schemaData.data.__schema.types;
|
||||
|
||||
const queryTypeName = schemaData.data.__schema.queryType.name;
|
||||
const queryType = types.find((t: any) => t.name === queryTypeName);
|
||||
|
||||
let createdCount = 0;
|
||||
let updatedCount = 0;
|
||||
let totalCount = 0;
|
||||
|
||||
if (!queryType?.fields) {
|
||||
console.log('No query fields found.');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (const query of queryType.fields) {
|
||||
const queryName = query.name;
|
||||
|
||||
if (hasRequiredArgs(query.args)) continue;
|
||||
if (queryName.includes('Duplicates')) continue;
|
||||
|
||||
const queryReturnType = unwrapType(query.type);
|
||||
|
||||
if (
|
||||
queryReturnType.kind === 'OBJECT' &&
|
||||
queryReturnType.name.includes('Connection')
|
||||
) {
|
||||
totalCount++;
|
||||
const connectionTypeInfo = types.find(
|
||||
(f: any) => f.name === queryReturnType.name,
|
||||
);
|
||||
const edgeTypeInfo = connectionTypeInfo?.fields?.find(
|
||||
(f: any) => f.name === 'edges',
|
||||
);
|
||||
|
||||
if (edgeTypeInfo) {
|
||||
const returnType = unwrapType(edgeTypeInfo.type);
|
||||
const returnTypeInfo = types.find(
|
||||
(t: any) => t.name === returnType.name,
|
||||
);
|
||||
const returnNodeTypeInfo = returnTypeInfo?.fields?.find(
|
||||
(f: any) => f.name === 'node',
|
||||
);
|
||||
|
||||
if (returnNodeTypeInfo) {
|
||||
const nodeType = unwrapType(returnNodeTypeInfo.type);
|
||||
const nodeTypeInfo = types.find((t: any) => t.name === nodeType.name);
|
||||
|
||||
if (!nodeTypeInfo?.fields) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const content = generateTestContent(queryName, nodeTypeInfo?.fields);
|
||||
const result = writeTestFile(queryName, content, force);
|
||||
|
||||
if (result === 'created') createdCount++;
|
||||
if (result === 'updated') updatedCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Number of tests created: ${createdCount}/${totalCount}`);
|
||||
if (force) {
|
||||
console.log(`Number of tests updated: ${updatedCount}/${totalCount}`);
|
||||
}
|
||||
};
|
||||
|
||||
// Basic command-line argument parsing
|
||||
const forceArg = process.argv.includes('--force');
|
||||
|
||||
// Call the function with the parsed argument
|
||||
generateTests(forceArg);
|
||||
@ -0,0 +1,89 @@
|
||||
export const INTROSPECTION_QUERY = `
|
||||
query IntrospectionQuery {
|
||||
__schema {
|
||||
queryType { name }
|
||||
mutationType { name }
|
||||
subscriptionType { name }
|
||||
types {
|
||||
...FullType
|
||||
}
|
||||
directives {
|
||||
name
|
||||
description
|
||||
locations
|
||||
args {
|
||||
...InputValue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment FullType on __Type {
|
||||
kind
|
||||
name
|
||||
description
|
||||
fields(includeDeprecated: true) {
|
||||
name
|
||||
description
|
||||
args {
|
||||
...InputValue
|
||||
}
|
||||
type {
|
||||
...TypeRef
|
||||
}
|
||||
isDeprecated
|
||||
deprecationReason
|
||||
}
|
||||
inputFields {
|
||||
...InputValue
|
||||
}
|
||||
interfaces {
|
||||
...TypeRef
|
||||
}
|
||||
enumValues(includeDeprecated: true) {
|
||||
name
|
||||
description
|
||||
isDeprecated
|
||||
deprecationReason
|
||||
}
|
||||
possibleTypes {
|
||||
...TypeRef
|
||||
}
|
||||
}
|
||||
|
||||
fragment InputValue on __InputValue {
|
||||
name
|
||||
description
|
||||
type { ...TypeRef }
|
||||
defaultValue
|
||||
}
|
||||
|
||||
fragment TypeRef on __Type {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
ofType {
|
||||
kind
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
@ -0,0 +1,60 @@
|
||||
export interface IntrospectionResponse {
|
||||
data: {
|
||||
__schema: Schema;
|
||||
};
|
||||
}
|
||||
|
||||
export interface Schema {
|
||||
queryType: { name: string };
|
||||
mutationType: { name: string | null };
|
||||
subscriptionType: { name: string | null };
|
||||
types: GraphQLType[];
|
||||
directives: Directive[];
|
||||
}
|
||||
|
||||
export interface Directive {
|
||||
name: string;
|
||||
description: string | null;
|
||||
locations: string[];
|
||||
args: InputValue[];
|
||||
}
|
||||
|
||||
export interface GraphQLType {
|
||||
kind: string;
|
||||
name: string;
|
||||
description: string | null;
|
||||
fields?: Field[];
|
||||
inputFields?: InputValue[];
|
||||
interfaces?: TypeRef[];
|
||||
enumValues?: EnumValue[];
|
||||
possibleTypes?: TypeRef[];
|
||||
}
|
||||
|
||||
export interface Field {
|
||||
name: string;
|
||||
description: string | null;
|
||||
args: InputValue[];
|
||||
type: TypeRef;
|
||||
isDeprecated: boolean;
|
||||
deprecationReason: string | null;
|
||||
}
|
||||
|
||||
export interface InputValue {
|
||||
name: string;
|
||||
description: string | null;
|
||||
type: TypeRef;
|
||||
defaultValue: string | null;
|
||||
}
|
||||
|
||||
export interface TypeRef {
|
||||
kind: string;
|
||||
name: string | null;
|
||||
ofType: TypeRef | null;
|
||||
}
|
||||
|
||||
export interface EnumValue {
|
||||
name: string;
|
||||
description: string | null;
|
||||
isDeprecated: boolean;
|
||||
deprecationReason: string | null;
|
||||
}
|
||||
@ -1,8 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# scripts/run-integration.sh
|
||||
|
||||
DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
source $DIR/set-env-test.sh
|
||||
|
||||
npx nx database:reset
|
||||
npx nx jest --config ./test/jest-e2e.json
|
||||
@ -1,25 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# scripts/set-env-test.sh
|
||||
|
||||
# Get script's directory
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
# Construct the absolute path of .env file in the project root directory
|
||||
ENV_PATH="${SCRIPT_DIR}/../.env.test"
|
||||
|
||||
# Check if the file exists
|
||||
if [ -f "${ENV_PATH}" ]; then
|
||||
echo "🔵 - Loading environment variables from "${ENV_PATH}"..."
|
||||
# Export env vars
|
||||
while IFS= read -r line || [ -n "$line" ]; do
|
||||
if echo "$line" | grep -F = &>/dev/null
|
||||
then
|
||||
varname=$(echo "$line" | cut -d '=' -f 1)
|
||||
varvalue=$(echo "$line" | cut -d '=' -f 2- | cut -d '#' -f 1)
|
||||
export "$varname"="$varvalue"
|
||||
fi
|
||||
done < <(grep -v '^#' "${ENV_PATH}")
|
||||
else
|
||||
echo "Error: ${ENV_PATH} does not exist."
|
||||
exit 1
|
||||
fi
|
||||
Reference in New Issue
Block a user