Fixed sync between record value context selector and record store (#5517)
This PR introduces many improvements to the new profiling story feature, with new tests and some refactoring on main: - Added use-context-selector for faster value retrieval in display fields, and created the useRecordFieldValue() hook and RecordValueSetterEffect to keep states in sync - Added a performance test command in CI - Refactored ExpandableList drill-downs with FieldFocusContext - Refactored the field button icon logic into a getFieldButtonIcon util - Added a RelationFieldDisplay perf story - Added a RecordTableCell perf story - First split test of the useField hook with useRelationFieldDisplay() - Fixed a problem with setting cell soft focus - Isolated the logic between display / soft focus and edit mode in the related components to optimize performance in display mode - Added a warmupRound config option for the performance story decorator - Added variance to test reporting
This commit is contained in:
@ -2,7 +2,7 @@ import { useEffect } from 'react';
|
||||
import { useRecoilState } from 'recoil';
|
||||
|
||||
import { TIME_BETWEEN_TEST_RUNS_IN_MS } from '~/testing/profiling/constants/TimeBetweenTestRunsInMs';
|
||||
import { currentProfilingRunIndexState } from '~/testing/profiling/states/currentProfilingRunState';
|
||||
import { currentProfilingRunIndexState } from '~/testing/profiling/states/currentProfilingRunIndexState';
|
||||
import { profilingQueueState } from '~/testing/profiling/states/profilingQueueState';
|
||||
import { profilingSessionRunsState } from '~/testing/profiling/states/profilingSessionRunsState';
|
||||
import { profilingSessionStatusState } from '~/testing/profiling/states/profilingSessionStatusState';
|
||||
@ -12,10 +12,12 @@ export const ProfilingQueueEffect = ({
|
||||
profilingId,
|
||||
numberOfTestsPerRun,
|
||||
numberOfRuns,
|
||||
warmUpRounds,
|
||||
}: {
|
||||
profilingId: string;
|
||||
numberOfTestsPerRun: number;
|
||||
numberOfRuns: number;
|
||||
warmUpRounds: number;
|
||||
}) => {
|
||||
const [currentProfilingRunIndex, setCurrentProfilingRunIndex] =
|
||||
useRecoilState(currentProfilingRunIndexState);
|
||||
@ -38,9 +40,9 @@ export const ProfilingQueueEffect = ({
|
||||
setCurrentProfilingRunIndex(0);
|
||||
|
||||
const newTestRuns = [
|
||||
'warm-up-1',
|
||||
'warm-up-2',
|
||||
'warm-up-3',
|
||||
...[
|
||||
...Array.from({ length: warmUpRounds }, (_, i) => `warm-up-${i}`),
|
||||
],
|
||||
...[
|
||||
...Array.from({ length: numberOfRuns }, (_, i) => `real-run-${i}`),
|
||||
],
|
||||
@ -76,9 +78,13 @@ export const ProfilingQueueEffect = ({
|
||||
return;
|
||||
}
|
||||
|
||||
await new Promise((resolve) =>
|
||||
setTimeout(resolve, TIME_BETWEEN_TEST_RUNS_IN_MS),
|
||||
);
|
||||
const timeInMs = profilingSessionRuns[
|
||||
currentProfilingRunIndex
|
||||
].startsWith('warm-up')
|
||||
? TIME_BETWEEN_TEST_RUNS_IN_MS * 2
|
||||
: TIME_BETWEEN_TEST_RUNS_IN_MS;
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, timeInMs));
|
||||
|
||||
const nextIndex = currentProfilingRunIndex + 1;
|
||||
|
||||
@ -109,6 +115,7 @@ export const ProfilingQueueEffect = ({
|
||||
profilingSessionRuns,
|
||||
setProfilingSessionRuns,
|
||||
numberOfRuns,
|
||||
warmUpRounds,
|
||||
]);
|
||||
|
||||
return <></>;
|
||||
|
||||
@ -3,7 +3,9 @@ import styled from '@emotion/styled';
|
||||
import { useRecoilState } from 'recoil';
|
||||
|
||||
import { PROFILING_REPORTER_DIV_ID } from '~/testing/profiling/constants/ProfilingReporterDivId';
|
||||
import { currentProfilingRunIndexState } from '~/testing/profiling/states/currentProfilingRunIndexState';
|
||||
import { profilingSessionDataPointsState } from '~/testing/profiling/states/profilingSessionDataPointsState';
|
||||
import { profilingSessionStatusState } from '~/testing/profiling/states/profilingSessionStatusState';
|
||||
import { computeProfilingReport } from '~/testing/profiling/utils/computeProfilingReport';
|
||||
|
||||
const StyledTable = styled.table`
|
||||
@ -24,6 +26,12 @@ export const ProfilingReporter = () => {
|
||||
profilingSessionDataPointsState,
|
||||
);
|
||||
|
||||
const [currentProfilingRunIndex] = useRecoilState(
|
||||
currentProfilingRunIndexState,
|
||||
);
|
||||
|
||||
const [profilingSessionStatus] = useRecoilState(profilingSessionStatusState);
|
||||
|
||||
const profilingReport = useMemo(
|
||||
() => computeProfilingReport(profilingSessionDataPoints),
|
||||
[profilingSessionDataPoints],
|
||||
@ -34,6 +42,10 @@ export const ProfilingReporter = () => {
|
||||
data-profiling-report={JSON.stringify(profilingReport)}
|
||||
id={PROFILING_REPORTER_DIV_ID}
|
||||
>
|
||||
<h2>Profiling report</h2>
|
||||
<div>
|
||||
Run #{currentProfilingRunIndex} - Status {profilingSessionStatus}
|
||||
</div>
|
||||
<StyledTable>
|
||||
<thead>
|
||||
<tr>
|
||||
@ -46,6 +58,7 @@ export const ProfilingReporter = () => {
|
||||
<th>P95</th>
|
||||
<th>P99</th>
|
||||
<th>Max</th>
|
||||
<th>Variance</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@ -59,6 +72,9 @@ export const ProfilingReporter = () => {
|
||||
<td>{Math.round(profilingReport.total.p95 * 1000) / 1000}ms</td>
|
||||
<td>{Math.round(profilingReport.total.p99 * 1000) / 1000}ms</td>
|
||||
<td>{Math.round(profilingReport.total.max * 1000) / 1000}ms</td>
|
||||
<td>
|
||||
{Math.round(profilingReport.total.variance * 1000000) / 1000000}
|
||||
</td>
|
||||
</tr>
|
||||
{Object.entries(profilingReport.runs).map(([runName, report]) => (
|
||||
<tr key={runName}>
|
||||
@ -71,6 +87,7 @@ export const ProfilingReporter = () => {
|
||||
<td>{Math.round(report.p95 * 1000) / 1000}ms</td>
|
||||
<td>{Math.round(report.p99 * 1000) / 1000}ms</td>
|
||||
<td>{Math.round(report.max * 1000) / 1000}ms</td>
|
||||
<td>{Math.round(report.variance * 1000000) / 1000000}</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
|
||||
@ -0,0 +1,6 @@
|
||||
import { atom } from 'recoil';
|
||||
|
||||
export const currentProfilingRunIndexState = atom<number>({
|
||||
key: 'currentProfilingRunIndexState',
|
||||
default: 0,
|
||||
});
|
||||
@ -10,6 +10,7 @@ export type ProfilingReportItem = {
|
||||
p99: number;
|
||||
min: number;
|
||||
max: number;
|
||||
variance: number;
|
||||
};
|
||||
|
||||
export type ProfilingReport = {
|
||||
|
||||
@ -1,7 +1,10 @@
|
||||
import { ProfilingDataPoint } from '~/testing/profiling/types/ProfilingDataPoint';
|
||||
import { ProfilingReport } from '~/testing/profiling/types/ProfilingReportByRun';
|
||||
|
||||
export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
|
||||
export const computeProfilingReport = (
|
||||
dataPoints: ProfilingDataPoint[],
|
||||
varianceThreshold?: number,
|
||||
) => {
|
||||
const profilingReport = { total: {}, runs: {} } as ProfilingReport;
|
||||
|
||||
for (const dataPoint of dataPoints) {
|
||||
@ -27,8 +30,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
|
||||
|
||||
const numberOfIds = ids.length;
|
||||
|
||||
profilingReport.runs[runName].average =
|
||||
profilingReport.runs[runName].sum / numberOfIds;
|
||||
const mean = profilingReport.runs[runName].sum / numberOfIds;
|
||||
|
||||
profilingReport.runs[runName].average = mean;
|
||||
|
||||
profilingReport.runs[runName].min = Math.min(
|
||||
...Object.values(profilingReport.runs[runName].sumById),
|
||||
@ -38,6 +42,14 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
|
||||
...Object.values(profilingReport.runs[runName].sumById),
|
||||
);
|
||||
|
||||
const intermediaryValuesForVariance = valuesUnsorted.map((value) =>
|
||||
Math.pow(value - mean, 2),
|
||||
);
|
||||
|
||||
profilingReport.runs[runName].variance =
|
||||
intermediaryValuesForVariance.reduce((acc, curr) => acc + curr) /
|
||||
numberOfIds;
|
||||
|
||||
const p50Index = Math.floor(numberOfIds * 0.5);
|
||||
const p80Index = Math.floor(numberOfIds * 0.8);
|
||||
const p90Index = Math.floor(numberOfIds * 0.9);
|
||||
@ -55,9 +67,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
|
||||
runName.startsWith('real-run'),
|
||||
);
|
||||
|
||||
const runsForTotal = runNamesForTotal.map(
|
||||
(runName) => profilingReport.runs[runName],
|
||||
);
|
||||
const runsForTotal = runNamesForTotal
|
||||
.map((runName) => profilingReport.runs[runName])
|
||||
.filter((run) => run.variance < (varianceThreshold ?? 0.2));
|
||||
|
||||
profilingReport.total = {
|
||||
sum: Object.values(runsForTotal).reduce((acc, run) => acc + run.sum, 0),
|
||||
@ -82,6 +94,9 @@ export const computeProfilingReport = (dataPoints: ProfilingDataPoint[]) => {
|
||||
Object.values(runsForTotal).reduce((acc, run) => acc + run.p99, 0) /
|
||||
Object.keys(runsForTotal).length,
|
||||
dataPointCount: dataPoints.length,
|
||||
variance:
|
||||
runsForTotal.reduce((acc, run) => acc + run.variance, 0) /
|
||||
runsForTotal.length,
|
||||
};
|
||||
|
||||
return profilingReport;
|
||||
|
||||
@ -11,19 +11,21 @@ export const getProfilingStory = ({
|
||||
averageThresholdInMs,
|
||||
numberOfRuns,
|
||||
numberOfTestsPerRun,
|
||||
warmUpRounds,
|
||||
}: {
|
||||
componentName: string;
|
||||
p95ThresholdInMs?: number;
|
||||
averageThresholdInMs: number;
|
||||
numberOfRuns: number;
|
||||
numberOfTestsPerRun: number;
|
||||
warmUpRounds?: number;
|
||||
}): StoryObj<any> => ({
|
||||
decorators: [ProfilerDecorator],
|
||||
parameters: {
|
||||
numberOfRuns,
|
||||
numberOfTests: numberOfTestsPerRun,
|
||||
componentName,
|
||||
chromatic: { disableSnapshot: true },
|
||||
warmUpRounds,
|
||||
},
|
||||
play: async ({ canvasElement }) => {
|
||||
await findByTestId(
|
||||
|
||||
Reference in New Issue
Block a user