Fixed sync between record value context selector and record store (#5517)

This PR introduces many improvements to the new profiling story
feature, with new tests and some refactoring merged from main:
- Added use-context-selector for getting value faster in display fields
and created useRecordFieldValue() hook and RecordValueSetterEffect to
synchronize states
- Added performance test command in CI
- Refactored ExpandableList drill-downs with FieldFocusContext
- Refactored field button icon logic into getFieldButtonIcon util
- Added RelationFieldDisplay perf story
- Added RecordTableCell perf story
- First split test of the useField... hooks with useRelationFieldDisplay()
- Fixed problem with set cell soft focus
- Isolated logic between display / soft focus and edit mode in the
related components to optimize performance for display mode.
- Added warmUpRounds config for performance story decorator
- Added variance in test reporting
This commit is contained in:
Lucas Bordeau
2024-05-24 16:52:05 +02:00
committed by GitHub
parent 82ec30c957
commit de9321dcd9
47 changed files with 2043 additions and 554 deletions

View File

@ -1,7 +1,10 @@
import { ProfilingDataPoint } from '~/testing/profiling/types/ProfilingDataPoint';
import { ProfilingReport } from '~/testing/profiling/types/ProfilingReportByRun';
export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
export const computeProfilingReport = (
dataPoints: ProfilingDataPoint[],
varianceThreshold?: number,
) => {
const profilingReport = { total: {}, runs: {} } as ProfilingReport;
for (const dataPoint of dataPoints) {
@ -27,8 +30,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
const numberOfIds = ids.length;
profilingReport.runs[runName].average =
profilingReport.runs[runName].sum / numberOfIds;
const mean = profilingReport.runs[runName].sum / numberOfIds;
profilingReport.runs[runName].average = mean;
profilingReport.runs[runName].min = Math.min(
...Object.values(profilingReport.runs[runName].sumById),
@ -38,6 +42,14 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
...Object.values(profilingReport.runs[runName].sumById),
);
const intermediaryValuesForVariance = valuesUnsorted.map((value) =>
Math.pow(value - mean, 2),
);
profilingReport.runs[runName].variance =
intermediaryValuesForVariance.reduce((acc, curr) => acc + curr) /
numberOfIds;
const p50Index = Math.floor(numberOfIds * 0.5);
const p80Index = Math.floor(numberOfIds * 0.8);
const p90Index = Math.floor(numberOfIds * 0.9);
@ -55,9 +67,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
runName.startsWith('real-run'),
);
const runsForTotal = runNamesForTotal.map(
(runName) => profilingReport.runs[runName],
);
const runsForTotal = runNamesForTotal
.map((runName) => profilingReport.runs[runName])
.filter((run) => run.variance < (varianceThreshold ?? 0.2));
profilingReport.total = {
sum: Object.values(runsForTotal).reduce((acc, run) => acc + run.sum, 0),
@ -82,6 +94,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
Object.values(runsForTotal).reduce((acc, run) => acc + run.p99, 0) /
Object.keys(runsForTotal).length,
dataPointCount: dataPoints.length,
variance:
runsForTotal.reduce((acc, run) => acc + run.variance, 0) /
runsForTotal.length,
};
return profilingReport;

View File

@ -11,19 +11,21 @@ export const getProfilingStory = ({
averageThresholdInMs,
numberOfRuns,
numberOfTestsPerRun,
warmUpRounds,
}: {
componentName: string;
p95ThresholdInMs?: number;
averageThresholdInMs: number;
numberOfRuns: number;
numberOfTestsPerRun: number;
warmUpRounds?: number;
}): StoryObj<any> => ({
decorators: [ProfilerDecorator],
parameters: {
numberOfRuns,
numberOfTests: numberOfTestsPerRun,
componentName,
chromatic: { disableSnapshot: true },
warmUpRounds,
},
play: async ({ canvasElement }) => {
await findByTestId(