diff --git a/.changeset/chatty-paws-turn.md b/.changeset/chatty-paws-turn.md
new file mode 100644
index 000000000..c443c7f33
--- /dev/null
+++ b/.changeset/chatty-paws-turn.md
@@ -0,0 +1,8 @@
+---
+'@callstack/reassure-measure': minor
+'reassure': minor
+'test-app-native': minor
+---
+
+- Rename `measurePerformance` to `measureRenders`.
+- Add `writeFile` option to `measureRenders`/`measureFunction`.
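
Taken together, these two changes let a test use the new name and opt out of writing results to disk. A minimal sketch of a perf test exercising both (`MyComponent` is a hypothetical placeholder):

```ts
// Example.perf-test.tsx — a sketch; `MyComponent` is a placeholder component.
import { measureRenders } from 'reassure';
import { MyComponent } from './MyComponent';

test('measures renders without writing to the output file', async () => {
  // `writeFile: false` skips the output file; stats are still returned.
  const stats = await measureRenders(<MyComponent />, { writeFile: false });
  expect(stats.runs).toBeGreaterThan(0);
});
```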
diff --git a/README.md b/README.md
index 1f894bb01..fdb0a916e 100644
--- a/README.md
+++ b/README.md
@@ -94,11 +94,11 @@ Now that the library is installed, you can write your first test scenario in a f
```ts
// ComponentUnderTest.perf-test.tsx
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { ComponentUnderTest } from './ComponentUnderTest';
test('Simple test', async () => {
- await measurePerformance(<ComponentUnderTest />);
+ await measureRenders(<ComponentUnderTest />);
});
```
@@ -111,7 +111,7 @@ This test will measure render times of `ComponentUnderTest` during mounting and
If your component contains any async logic or you want to test some interaction, you should pass the `scenario` option:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { screen, fireEvent } from '@testing-library/react-native';
import { ComponentUnderTest } from './ComponentUnderTest';
@@ -121,7 +121,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -130,7 +130,7 @@ The body of the `scenario` function is using familiar React Native Testing Libra
In case of using a version of React Native Testing Library lower than v10.1.0, where [`screen` helper](https://callstack.github.io/react-native-testing-library/docs/api/#screen) is not available, the `scenario` function provides it as its first argument:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { fireEvent } from '@testing-library/react-native';
test('Test with scenario', async () => {
@@ -139,7 +139,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -352,27 +352,28 @@ Looking at the example, you can notice that test scenarios can be assigned to ce
### Measurements
-#### `measurePerformance` function
+#### `measureRenders` function
Custom wrapper for the RNTL `render` function responsible for rendering the passed screen inside a `React.Profiler` component,
measuring its performance and writing results to the output file. You can use the optional `options` object that allows customizing aspects
of the testing
```ts
-async function measurePerformance(
+async function measureRenders(
ui: React.ReactElement,
- options?: MeasureOptions,
+ options?: MeasureRendersOptions,
): Promise<MeasureResults> {
```
-#### `MeasureOptions` type
+#### `MeasureRendersOptions` type
```ts
-interface MeasureOptions {
+interface MeasureRendersOptions {
runs?: number;
warmupRuns?: number;
wrapper?: React.ComponentType<{ children: ReactElement }>;
scenario?: (view?: RenderResult) => Promise<void>;
+ writeFile?: boolean;
}
```
@@ -380,6 +381,7 @@ interface MeasureOptions {
- **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs (default 1).
- **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results; only the wrapped component is measured.
- **`scenario`**: a custom async function, which defines user interaction within the UI by utilising RNTL or RTL functions
+- **`writeFile`**: whether to write measurement results to the output file (default `true`).
#### `measureFunction` function
@@ -435,10 +437,11 @@ const defaultConfig: Config = {
```
**`runs`**: the number of repeated runs in a series per test (allows for higher accuracy by aggregating more data). Should be handled with care.
+
- **`warmupRuns`**: the number of additional warmup runs that will be done and discarded before the actual runs.
-**`outputFile`**: the name of the file the records will be saved to
-**`verbose`**: make Reassure log more, e.g. for debugging purposes
-**`testingLibrary`**: where to look for `render` and `cleanup` functions, supported values `'react-native'`, `'react'` or object providing custom `render` and `cleanup` functions
+ **`outputFile`**: the name of the file the records will be saved to
+ **`verbose`**: make Reassure log more, e.g. for debugging purposes
+ **`testingLibrary`**: where to look for `render` and `cleanup` functions, supported values `'react-native'`, `'react'` or object providing custom `render` and `cleanup` functions
#### `configure` function
@@ -448,10 +451,10 @@ function configure(customConfig: Partial<Config>): void;
The `configure` function can override the default config parameters.
-#### `resetToDefault` function
+#### `resetToDefaults` function
```ts
-resetToDefault(): void
+resetToDefaults(): void
```
Reset the current config to the original `defaultConfig` object
diff --git a/docusaurus/docs/api.md b/docusaurus/docs/api.md
index 0e13c0a8d..622dce355 100644
--- a/docusaurus/docs/api.md
+++ b/docusaurus/docs/api.md
@@ -7,24 +7,30 @@ sidebar_position: 4
## Measurements
-### `measurePerformance` function
+### `measureRenders()` function {#measure-renders}
-Custom wrapper for the RNTL `render` function responsible for rendering the passed screen inside a `React.Profiler` component,
+:::info
+
+Prior to version 1.0, this function was named `measurePerformance`.
+
+:::
+
+Custom wrapper for the RNTL/RTL's `render` function responsible for rendering the passed screen inside a `React.Profiler` component,
measuring its performance and writing results to the output file. You can use the optional `options` object that allows customizing aspects
-of the testing
+of the testing.
```ts
-async function measurePerformance(
+async function measureRenders(
ui: React.ReactElement,
- options?: MeasureOptions,
+ options?: MeasureRendersOptions,
): Promise<MeasureResults> {
```
-#### Example
+#### Example {#measure-renders-example}
```ts
// sample.perf-test.tsx
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { screen, fireEvent } from '@testing-library/react-native';
import { ComponentUnderTest } from './ComponentUnderTest';
@@ -34,18 +40,19 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
-### `MeasureOptions` type
+### `MeasureRendersOptions` type {#measure-renders-options}
```ts
-interface MeasureOptions {
+interface MeasureRendersOptions {
runs?: number;
warmupRuns?: number;
wrapper?: React.ComponentType<{ children: ReactElement }>;
scenario?: (view?: RenderResult) => Promise<void>;
+ writeFile?: boolean;
}
```
@@ -53,8 +60,9 @@ interface MeasureOptions {
- **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs.
- **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results, only the wrapped component is measured.
- **`scenario`**: a custom async function, which defines user interaction within the UI by utilizing RNTL functions
+- **`writeFile`**: whether to write measurement results to the output file (default `true`).
-### `measureFunction` function
+### `measureFunction` function {#measure-function}
Allows you to wrap any synchronous function, measure its performance and write results to the output file. You can use optional `options` to customize aspects of the testing.
@@ -65,7 +73,7 @@ async function measureFunction(
): Promise<MeasureResults> {
```
-#### Example
+#### Example {#measure-function-example}
```ts
// sample.perf-test.tsx
@@ -77,17 +85,19 @@ test('fib 30', async () => {
});
```
-### `MeasureFunctionOptions` type
+### `MeasureFunctionOptions` type {#measure-function-options}
```ts
interface MeasureFunctionOptions {
runs?: number;
warmupRuns?: number;
+ writeFile?: boolean;
}
```
- **`runs`**: number of runs per series for the particular test
- **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs.
+- **`writeFile`**: whether to write measurement results to the output file (default `true`).
## Configuration
@@ -133,7 +143,7 @@ function configure(customConfig: Partial<Config>): void;
You can use the `configure` function to override the default config parameters.
-#### Example
+#### Example {#configure-example}
```ts
import { configure } from 'reassure';
@@ -144,13 +154,13 @@ configure({
});
```
-### `resetToDefault` function
+### `resetToDefaults` function {#reset-to-defaults}
```ts
-resetToDefault(): void
+resetToDefaults(): void
```
-Reset current config to the original `defaultConfig` object. You can call `resetToDefault()` anywhere in your performance test file.
+Reset current config to the original `defaultConfig` object. You can call `resetToDefaults()` anywhere in your performance test file.
### Environmental variables
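
As a companion to the API docs above, a sketch of `measureFunction` with the new `writeFile` option (the `fib` helper mirrors the example used elsewhere in this diff):

```ts
// fib.perf-test.ts — a sketch using the documented API.
import { measureFunction } from 'reassure';

// Exponentially slow function, as in the docs example.
function fib(n: number): number {
  return n <= 1 ? n : fib(n - 1) + fib(n - 2);
}

test('fib 30 without file output', async () => {
  // Results are returned for ad-hoc assertions; nothing is written to disk.
  const results = await measureFunction(() => fib(30), { runs: 10, writeFile: false });
  expect(results.runs).toBe(10);
});
```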
diff --git a/docusaurus/docs/installation.md b/docusaurus/docs/installation.md
index e98d179b1..4292cd431 100644
--- a/docusaurus/docs/installation.md
+++ b/docusaurus/docs/installation.md
@@ -44,10 +44,10 @@ Now that the library is installed, you can write your first test scenario in a fi
```ts
// ComponentUnderTest.perf-test.tsx
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
test('Simple test', async () => {
- await measurePerformance(<ComponentUnderTest />);
+ await measureRenders(<ComponentUnderTest />);
});
```
@@ -60,7 +60,7 @@ This test will measure render times of `ComponentUnderTest` during mounting and
If your component contains any async logic or you want to test some interaction you should pass the `scenario` option:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { screen, fireEvent } from '@testing-library/react-native';
test('Test with scenario', async () => {
@@ -69,7 +69,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -78,7 +78,7 @@ The body of the `scenario` function is using familiar React Native Testing Libra
In case of using a version of React Native Testing Library lower than v10.1.0, where [`screen` helper](https://callstack.github.io/react-native-testing-library/docs/api/#screen) is not available, the `scenario` function provides it as its first argument:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { fireEvent } from '@testing-library/react-native';
test('Test with scenario', async () => {
@@ -87,7 +87,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -95,6 +95,7 @@ If your test contains any async changes, you will need to make sure that the sce
`findBy` queries, `waitFor` or `waitForElementToBeRemoved` functions from RNTL.
For more examples look into our example apps:
+
- [React Native (CLI)](https://github.com/callstack/reassure-examples/tree/main/examples/native)
- [React Native (Expo)](https://github.com/callstack/reassure-examples/tree/main/examples/native-expo)
- [React (Next.js)](https://github.com/callstack/reassure-examples/tree/main/examples/web-nextjs)
@@ -253,7 +254,7 @@ for performance tests you can add the following override to your `.eslintrc` file:
rules: {
'jest/expect-expect': [
'error',
- { assertFunctionNames: ['expect', 'measurePerformance'] },
+ { assertFunctionNames: ['expect', 'measureRenders'] },
],
}
```
diff --git a/packages/reassure-compare/src/output/json.ts b/packages/reassure-compare/src/output/json.ts
index c2c84fbdd..5637daaf3 100644
--- a/packages/reassure-compare/src/output/json.ts
+++ b/packages/reassure-compare/src/output/json.ts
@@ -12,7 +12,7 @@ export async function writeToJson(filePath: string, data: CompareResult) {
} catch (error) {
logger.error(`❌ Could not write JSON output file ${filePath}`);
logger.error(`🔗 ${path.resolve(filePath)}`);
- logger.error(error);
+ logger.error('Error details:', error);
throw error;
}
}
diff --git a/packages/reassure-compare/src/output/markdown.ts b/packages/reassure-compare/src/output/markdown.ts
index bf74de0f4..a4ca92850 100644
--- a/packages/reassure-compare/src/output/markdown.ts
+++ b/packages/reassure-compare/src/output/markdown.ts
@@ -43,7 +43,7 @@ async function writeToFile(filePath: string, content: string) {
} catch (error) {
logger.error(`❌ Could not write markdown output file ${filePath}`);
logger.error(`🔗 ${path.resolve(filePath)}`);
- logger.error(error);
+ logger.error('Error details:', error);
throw error;
}
}
diff --git a/packages/reassure-logger/src/index.ts b/packages/reassure-logger/src/index.ts
index 07bc209e3..435c061e0 100644
--- a/packages/reassure-logger/src/index.ts
+++ b/packages/reassure-logger/src/index.ts
@@ -1 +1,2 @@
export * as logger from './logger';
+export { warnOnce } from './warn-once';
diff --git a/packages/reassure-logger/src/logger.ts b/packages/reassure-logger/src/logger.ts
index 8f2f3c11d..181202ebd 100644
--- a/packages/reassure-logger/src/logger.ts
+++ b/packages/reassure-logger/src/logger.ts
@@ -28,32 +28,32 @@ export function configure(options: Partial) {
// Jest is wrapping console.* calls, so we need to get the raw console object
const rawConsole = require('console') as typeof console;
-export function error(...args: unknown[]) {
- rawConsole.error(colorError(...args));
+export function error(message?: string, ...args: unknown[]) {
+ rawConsole.error(colorError(message, ...args));
}
-export function warn(...args: unknown[]) {
+export function warn(message?: string, ...args: unknown[]) {
if (config.silent) return;
- rawConsole.warn(colorWarn(...args));
+ rawConsole.warn(colorWarn(message, ...args));
}
-export function log(...args: unknown[]) {
+export function log(message?: string, ...args: unknown[]) {
if (config.silent) return;
- rawConsole.log(...args);
+ rawConsole.log(message, ...args);
}
-export function verbose(...args: unknown[]) {
+export function verbose(message?: string, ...args: unknown[]) {
if (!config.verbose || config.silent) return;
- rawConsole.log(colorVerbose(...args));
+ rawConsole.log(colorVerbose(message, ...args));
}
export function color(color: keyof typeof colors, ...args: unknown[]) {
if (config.silent) return;
- return rawConsole.log(chalk.hex(colors[color])(args));
+ return rawConsole.log(chalk.hex(colors[color])(...args));
}
/** Log message that indicates progress of operation, does not output the trailing newline. */
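
The explicit `message` parameter mirrors the `console.*` family's `(message?, ...optionalParams)` shape, which is what makes calls like the `logger.error('Error details:', error)` changes above read naturally. A usage sketch:

```ts
import { logger } from '@callstack/reassure-logger';

// Hypothetical operation that may throw; declared only for the sketch.
declare function riskyOperation(): void;

try {
  riskyOperation();
} catch (error) {
  // The first argument is the human-readable message; extra args carry the raw error.
  logger.error('Error details:', error);
}
```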
diff --git a/packages/reassure-logger/src/warn-once.ts b/packages/reassure-logger/src/warn-once.ts
new file mode 100644
index 000000000..cfc7c3a05
--- /dev/null
+++ b/packages/reassure-logger/src/warn-once.ts
@@ -0,0 +1,12 @@
+import { warn } from './logger';
+
+const warned = new Set<string>();
+
+export function warnOnce(message: string, ...args: unknown[]) {
+ if (warned.has(message)) {
+ return;
+ }
+
+ warn(message, ...args);
+ warned.add(message);
+}
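
`warnOnce` keys deduplication on the exact `message` string, so a deprecation warning fired from every test in a suite is printed a single time. A usage sketch:

```ts
import { warnOnce } from '@callstack/reassure-logger';

// Only the first call logs; the second is a no-op because the message repeats.
warnOnce('`measurePerformance` is deprecated, use `measureRenders` instead.');
warnOnce('`measurePerformance` is deprecated, use `measureRenders` instead.');
```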
diff --git a/packages/reassure-measure/src/__tests__/measure-function.test.tsx b/packages/reassure-measure/src/__tests__/measure-function.test.tsx
index 01555c166..e8247cf45 100644
--- a/packages/reassure-measure/src/__tests__/measure-function.test.tsx
+++ b/packages/reassure-measure/src/__tests__/measure-function.test.tsx
@@ -1,4 +1,4 @@
-import { measureFunctionInternal } from '../measure-function';
+import { measureFunction } from '../measure-function';
import { resetHasShownFlagsOutput } from '../output';
// Exponentially slow function
@@ -10,18 +10,18 @@ function fib(n: number): number {
return fib(n - 1) + fib(n - 2);
}
-test('measureFunctionInternal captures results', () => {
+test('measureFunction captures results', async () => {
const fn = jest.fn(() => fib(5));
- const results = measureFunctionInternal(fn, { runs: 1, warmupRuns: 0 });
+ const results = await measureFunction(fn, { runs: 1, warmupRuns: 0, writeFile: false });
expect(fn).toHaveBeenCalledTimes(1);
expect(results.runs).toBe(1);
expect(results.counts).toEqual([1]);
});
-test('measureFunctionInternal runs specified number of times', () => {
+test('measureFunction runs specified number of times', async () => {
const fn = jest.fn(() => fib(5));
- const results = measureFunctionInternal(fn, { runs: 20, warmupRuns: 0 });
+ const results = await measureFunction(fn, { runs: 20, warmupRuns: 0, writeFile: false });
expect(fn).toHaveBeenCalledTimes(20);
expect(results.runs).toBe(20);
@@ -31,9 +31,9 @@ test('measureFunctionInternal runs specified number of times', () => {
expect(results.stdevCount).toBe(0);
});
-test('measureFunctionInternal applies "warmupRuns" option', () => {
+test('measureFunction applies "warmupRuns" option', async () => {
const fn = jest.fn(() => fib(5));
- const results = measureFunctionInternal(fn, { runs: 10, warmupRuns: 1 });
+ const results = await measureFunction(fn, { runs: 10, warmupRuns: 1, writeFile: false });
expect(fn).toHaveBeenCalledTimes(11);
expect(results.runs).toBe(10);
@@ -54,9 +54,9 @@ beforeEach(() => {
});
});
-test('measureFunctionInternal should log error when running under incorrect node flags', () => {
+test('measureFunction should log error when running under incorrect node flags', async () => {
resetHasShownFlagsOutput();
- const results = measureFunctionInternal(jest.fn(), { runs: 1 });
+ const results = await measureFunction(jest.fn(), { runs: 1, writeFile: false });
expect(results.runs).toBe(1);
expect(realConsole.error).toHaveBeenCalledWith(`❌ Measure code is running under incorrect Node.js configuration.
diff --git a/packages/reassure-measure/src/__tests__/measure-render.test.tsx b/packages/reassure-measure/src/__tests__/measure-renders.test.tsx
similarity index 79%
rename from packages/reassure-measure/src/__tests__/measure-render.test.tsx
rename to packages/reassure-measure/src/__tests__/measure-renders.test.tsx
index 15bbc43c1..2a9a5af48 100644
--- a/packages/reassure-measure/src/__tests__/measure-render.test.tsx
+++ b/packages/reassure-measure/src/__tests__/measure-renders.test.tsx
@@ -1,6 +1,6 @@
import * as React from 'react';
import { View } from 'react-native';
-import { buildUiToRender, measureRender } from '../measure-render';
+import { buildUiToRender, measureRenders } from '../measure-renders';
import { resetHasShownFlagsOutput } from '../output';
const errorsToIgnore = ['❌ Measure code is running under incorrect Node.js configuration.'];
@@ -14,9 +14,9 @@ beforeEach(() => {
});
});
-test('measureRender run test given number of times', async () => {
+test('measureRenders run test given number of times', async () => {
const scenario = jest.fn(() => Promise.resolve(null));
- const results = await measureRender(<View />, { runs: 20, scenario });
+ const results = await measureRenders(<View />, { runs: 20, scenario, writeFile: false });
expect(results.runs).toBe(20);
expect(results.durations).toHaveLength(20);
expect(results.counts).toHaveLength(20);
@@ -27,9 +27,9 @@ test('measureRender run test given number of times', async () => {
expect(scenario).toHaveBeenCalledTimes(21);
});
-test('measureRender applies "warmupRuns" option', async () => {
+test('measureRenders applies "warmupRuns" option', async () => {
const scenario = jest.fn(() => Promise.resolve(null));
- const results = await measureRender(<View />, { runs: 10, scenario });
+ const results = await measureRenders(<View />, { runs: 10, scenario, writeFile: false });
expect(scenario).toHaveBeenCalledTimes(11);
expect(results.runs).toBe(10);
@@ -39,9 +39,9 @@ test('measureRender applies "warmupRuns" option', async () => {
expect(results.stdevCount).toBe(0);
});
-test('measureRender should log error when running under incorrect node flags', async () => {
+test('measureRenders should log error when running under incorrect node flags', async () => {
resetHasShownFlagsOutput();
- const results = await measureRender(<View />, { runs: 1 });
+ const results = await measureRenders(<View />, { runs: 1, writeFile: false });
expect(results.runs).toBe(1);
expect(realConsole.error).toHaveBeenCalledWith(`❌ Measure code is running under incorrect Node.js configuration.
@@ -53,8 +53,8 @@ function IgnoreChildren(_: React.PropsWithChildren<{}>) {
return <View />;
}
-test('measureRender does not measure wrapper execution', async () => {
- const results = await measureRender(<View />, { wrapper: IgnoreChildren });
+test('measureRenders does not measure wrapper execution', async () => {
+ const results = await measureRenders(<View />, { wrapper: IgnoreChildren, writeFile: false });
expect(results.runs).toBe(10);
expect(results.durations).toHaveLength(10);
expect(results.counts).toHaveLength(10);
diff --git a/packages/reassure-measure/src/config.ts b/packages/reassure-measure/src/config.ts
index 88f91890a..01e9f7313 100644
--- a/packages/reassure-measure/src/config.ts
+++ b/packages/reassure-measure/src/config.ts
@@ -26,6 +26,6 @@ export function configure(customConfig: Partial) {
};
}
-export function resetToDefault() {
+export function resetToDefaults() {
config = defaultConfig;
}
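
The renamed `resetToDefaults` pairs naturally with `configure` in suites that temporarily override settings; a sketch, assuming the `reassure` re-exports shown later in this diff:

```ts
import { configure, resetToDefaults } from 'reassure';

beforeAll(() => {
  configure({ runs: 5, verbose: true });
});

afterAll(() => {
  // Restore the original defaultConfig so other suites are unaffected.
  resetToDefaults();
});
```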
diff --git a/packages/reassure-measure/src/index.ts b/packages/reassure-measure/src/index.ts
index 1960bd41a..fea111d2e 100644
--- a/packages/reassure-measure/src/index.ts
+++ b/packages/reassure-measure/src/index.ts
@@ -1,4 +1,5 @@
-export { configure, resetToDefault } from './config';
-export { measurePerformance } from './measure-render';
+export { configure, resetToDefaults } from './config';
+export { measureRenders, measurePerformance } from './measure-renders';
export { measureFunction } from './measure-function';
-export type { MeasureOptions } from './measure-render';
+export type { MeasureRendersOptions } from './measure-renders';
+export type { MeasureFunctionOptions } from './measure-function';
diff --git a/packages/reassure-measure/src/measure-function.tsx b/packages/reassure-measure/src/measure-function.tsx
index 818a291a6..61620fb73 100644
--- a/packages/reassure-measure/src/measure-function.tsx
+++ b/packages/reassure-measure/src/measure-function.tsx
@@ -4,19 +4,23 @@ import type { MeasureResults } from './types';
import { type RunResult, processRunResults } from './measure-helpers';
import { showFlagsOutputIfNeeded, writeTestStats } from './output';
-interface MeasureFunctionOptions {
+export interface MeasureFunctionOptions {
runs?: number;
warmupRuns?: number;
+ writeFile?: boolean;
}
export async function measureFunction(fn: () => void, options?: MeasureFunctionOptions): Promise<MeasureResults> {
const stats = await measureFunctionInternal(fn, options);
- await writeTestStats(stats, 'function');
+
+ if (options?.writeFile !== false) {
+ await writeTestStats(stats, 'function');
+ }
return stats;
}
-export function measureFunctionInternal(fn: () => void, options?: MeasureFunctionOptions): MeasureResults {
+function measureFunctionInternal(fn: () => void, options?: MeasureFunctionOptions): MeasureResults {
const runs = options?.runs ?? config.runs;
const warmupRuns = options?.warmupRuns ?? config.warmupRuns;
diff --git a/packages/reassure-measure/src/measure-render.tsx b/packages/reassure-measure/src/measure-renders.tsx
similarity index 66%
rename from packages/reassure-measure/src/measure-render.tsx
rename to packages/reassure-measure/src/measure-renders.tsx
index ae864272e..06a4f8e11 100644
--- a/packages/reassure-measure/src/measure-render.tsx
+++ b/packages/reassure-measure/src/measure-renders.tsx
@@ -1,9 +1,9 @@
import * as React from 'react';
-import { logger } from '@callstack/reassure-logger';
+import { logger, warnOnce } from '@callstack/reassure-logger';
import { config } from './config';
import { RunResult, processRunResults } from './measure-helpers';
import { showFlagsOutputIfNeeded, writeTestStats } from './output';
-import { resolveTestingLibrary } from './testingLibrary';
+import { resolveTestingLibrary } from './testing-library';
import type { MeasureResults } from './types';
logger.configure({
@@ -11,21 +11,42 @@ logger.configure({
silent: process.env.REASSURE_SILENT === 'true' || process.env.REASSURE_SILENT === '1',
});
-export interface MeasureOptions {
+export interface MeasureRendersOptions {
runs?: number;
warmupRuns?: number;
wrapper?: React.ComponentType<{ children: React.ReactElement }>;
scenario?: (screen: any) => Promise<void>;
+ writeFile?: boolean;
}
-export async function measurePerformance(ui: React.ReactElement, options?: MeasureOptions): Promise<MeasureResults> {
- const stats = await measureRender(ui, options);
- await writeTestStats(stats, 'render');
+export async function measureRenders(ui: React.ReactElement, options?: MeasureRendersOptions): Promise<MeasureResults> {
+ const stats = await measureRendersInternal(ui, options);
+
+ if (options?.writeFile !== false) {
+ await writeTestStats(stats, 'render');
+ }
return stats;
}
-export async function measureRender(ui: React.ReactElement, options?: MeasureOptions): Promise<MeasureResults> {
+/**
+ * @deprecated The `measurePerformance` function has been renamed to `measureRenders`. The `measurePerformance` alias is now deprecated and will be removed in future releases.
+ */
+export async function measurePerformance(
+ ui: React.ReactElement,
+ options?: MeasureRendersOptions
+): Promise<MeasureResults> {
+ warnOnce(
+ 'The `measurePerformance` function has been renamed to `measureRenders`.\n\nThe `measurePerformance` alias is now deprecated and will be removed in future releases.'
+ );
+
+ return await measureRenders(ui, options);
+}
+
+async function measureRendersInternal(
+ ui: React.ReactElement,
+ options?: MeasureRendersOptions
+): Promise<MeasureResults> {
const runs = options?.runs ?? config.runs;
const scenario = options?.scenario;
const warmupRuns = options?.warmupRuns ?? config.warmupRuns;
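
With the internals split out, the deprecated alias stays a thin wrapper: existing call sites keep working and merely trigger the one-time warning. A sketch of a legacy test, assuming the usual `ComponentUnderTest` from the docs:

```ts
// A sketch: legacy call sites continue to work through the alias.
import { measurePerformance } from 'reassure';
import { ComponentUnderTest } from './ComponentUnderTest';

test('legacy name still works', async () => {
  // Emits the deprecation warning once, then delegates to measureRenders.
  await measurePerformance(<ComponentUnderTest />);
});
```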
diff --git a/packages/reassure-measure/src/testingLibrary.ts b/packages/reassure-measure/src/testing-library.ts
similarity index 100%
rename from packages/reassure-measure/src/testingLibrary.ts
rename to packages/reassure-measure/src/testing-library.ts
diff --git a/packages/reassure/README.md b/packages/reassure/README.md
index 1f894bb01..fdb0a916e 100644
--- a/packages/reassure/README.md
+++ b/packages/reassure/README.md
@@ -94,11 +94,11 @@ Now that the library is installed, you can write your first test scenario in a f
```ts
// ComponentUnderTest.perf-test.tsx
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { ComponentUnderTest } from './ComponentUnderTest';
test('Simple test', async () => {
- await measurePerformance(<ComponentUnderTest />);
+ await measureRenders(<ComponentUnderTest />);
});
```
@@ -111,7 +111,7 @@ This test will measure render times of `ComponentUnderTest` during mounting and
If your component contains any async logic or you want to test some interaction, you should pass the `scenario` option:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { screen, fireEvent } from '@testing-library/react-native';
import { ComponentUnderTest } from './ComponentUnderTest';
@@ -121,7 +121,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -130,7 +130,7 @@ The body of the `scenario` function is using familiar React Native Testing Libra
In case of using a version of React Native Testing Library lower than v10.1.0, where [`screen` helper](https://callstack.github.io/react-native-testing-library/docs/api/#screen) is not available, the `scenario` function provides it as its first argument:
```ts
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { fireEvent } from '@testing-library/react-native';
test('Test with scenario', async () => {
@@ -139,7 +139,7 @@ test('Test with scenario', async () => {
await screen.findByText('Done');
};
- await measurePerformance(<ComponentUnderTest />, { scenario });
+ await measureRenders(<ComponentUnderTest />, { scenario });
});
```
@@ -352,27 +352,28 @@ Looking at the example, you can notice that test scenarios can be assigned to ce
### Measurements
-#### `measurePerformance` function
+#### `measureRenders` function
Custom wrapper for the RNTL `render` function responsible for rendering the passed screen inside a `React.Profiler` component,
measuring its performance and writing results to the output file. You can use the optional `options` object that allows customizing aspects
of the testing
```ts
-async function measurePerformance(
+async function measureRenders(
ui: React.ReactElement,
- options?: MeasureOptions,
+ options?: MeasureRendersOptions,
): Promise<MeasureResults> {
```
-#### `MeasureOptions` type
+#### `MeasureRendersOptions` type
```ts
-interface MeasureOptions {
+interface MeasureRendersOptions {
runs?: number;
warmupRuns?: number;
wrapper?: React.ComponentType<{ children: ReactElement }>;
scenario?: (view?: RenderResult) => Promise<void>;
+ writeFile?: boolean;
}
```
@@ -380,6 +381,7 @@ interface MeasureOptions {
- **`warmupRuns`**: number of additional warmup runs that will be done and discarded before the actual runs (default 1).
- **`wrapper`**: React component, such as a `Provider`, which the `ui` will be wrapped with. Note: the render duration of the `wrapper` itself is excluded from the results; only the wrapped component is measured.
- **`scenario`**: a custom async function, which defines user interaction within the UI by utilising RNTL or RTL functions
+- **`writeFile`**: whether to write measurement results to the output file (default `true`).
#### `measureFunction` function
@@ -435,10 +437,11 @@ const defaultConfig: Config = {
```
**`runs`**: the number of repeated runs in a series per test (allows for higher accuracy by aggregating more data). Should be handled with care.
+
- **`warmupRuns`**: the number of additional warmup runs that will be done and discarded before the actual runs.
-**`outputFile`**: the name of the file the records will be saved to
-**`verbose`**: make Reassure log more, e.g. for debugging purposes
-**`testingLibrary`**: where to look for `render` and `cleanup` functions, supported values `'react-native'`, `'react'` or object providing custom `render` and `cleanup` functions
+ **`outputFile`**: the name of the file the records will be saved to
+ **`verbose`**: make Reassure log more, e.g. for debugging purposes
+ **`testingLibrary`**: where to look for `render` and `cleanup` functions, supported values `'react-native'`, `'react'` or object providing custom `render` and `cleanup` functions
#### `configure` function
@@ -448,10 +451,10 @@ function configure(customConfig: Partial<Config>): void;
The `configure` function can override the default config parameters.
-#### `resetToDefault` function
+#### `resetToDefaults` function
```ts
-resetToDefault(): void
+resetToDefaults(): void
```
Reset the current config to the original `defaultConfig` object
diff --git a/packages/reassure/src/index.ts b/packages/reassure/src/index.ts
index de28799b9..254cdd67e 100644
--- a/packages/reassure/src/index.ts
+++ b/packages/reassure/src/index.ts
@@ -1,3 +1,10 @@
-export { measurePerformance, measureFunction, configure, resetToDefault } from '@callstack/reassure-measure';
-
+export {
+ measureRenders,
+ measureFunction,
+ configure,
+ resetToDefaults,
+ measurePerformance,
+} from '@callstack/reassure-measure';
export { dangerReassure } from '@callstack/reassure-danger';
+
+export type { MeasureRendersOptions, MeasureFunctionOptions } from '@callstack/reassure-measure';
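
The added type re-exports let consumers type shared option objects from the `reassure` entry point instead of reaching into `@callstack/reassure-measure`. A sketch, reusing the `ComponentUnderTest` from the docs examples:

```ts
import { measureRenders, type MeasureRendersOptions } from 'reassure';
import { ComponentUnderTest } from './ComponentUnderTest';

// One options object reused across tests, typed via the re-exported type.
const baseOptions: MeasureRendersOptions = { runs: 10, warmupRuns: 1, writeFile: false };

test('typed options', async () => {
  await measureRenders(<ComponentUnderTest />, baseOptions);
});
```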
diff --git a/test-apps/native/.eslintrc.js b/test-apps/native/.eslintrc.js
index 273dfba6e..30b839942 100644
--- a/test-apps/native/.eslintrc.js
+++ b/test-apps/native/.eslintrc.js
@@ -10,7 +10,7 @@ module.exports = {
{
assertFunctionNames: [
'expect',
- 'measurePerformance',
+ 'measureRenders',
'measureFunction',
],
},
diff --git a/test-apps/native/src/OtherTest.perf-test.tsx b/test-apps/native/src/OtherTest.perf-test.tsx
index bf3b09fe9..0ccf06ad8 100644
--- a/test-apps/native/src/OtherTest.perf-test.tsx
+++ b/test-apps/native/src/OtherTest.perf-test.tsx
@@ -1,7 +1,7 @@
import React from 'react';
import { View, Text, Pressable } from 'react-native';
import { fireEvent, RenderAPI, screen } from '@testing-library/react-native';
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { SlowList } from './SlowList';
@@ -35,7 +35,7 @@ test('Other Component 10', async () => {
await screen.findByText('Count: 2');
};
- await measurePerformance(<OtherComponent />, { scenario, runs: 10 });
+ await measureRenders(<OtherComponent />, { scenario, runs: 10 });
});
test('Other Component 10 legacy scenario', async () => {
@@ -47,7 +47,7 @@ test('Other Component 10 legacy scenario', async () => {
await screen.findByText('Count: 2');
};
- await measurePerformance(<OtherComponent />, { scenario, runs: 10 });
+ await measureRenders(<OtherComponent />, { scenario, runs: 10 });
});
test('Other Component 20', async () => {
@@ -59,5 +59,5 @@ test('Other Component 20', async () => {
await screen.findByText('Count: 2');
};
- await measurePerformance(<OtherComponent />, { scenario, runs: 20 });
+ await measureRenders(<OtherComponent />, { scenario, runs: 20 });
});
diff --git a/test-apps/native/src/SlowList.perf-test.tsx b/test-apps/native/src/SlowList.perf-test.tsx
index ddb4d15ae..081b946f7 100644
--- a/test-apps/native/src/SlowList.perf-test.tsx
+++ b/test-apps/native/src/SlowList.perf-test.tsx
@@ -1,7 +1,7 @@
import * as React from 'react';
import { View, Text, Pressable } from 'react-native';
import { fireEvent, screen } from '@testing-library/react-native';
-import { measurePerformance } from 'reassure';
+import { measureRenders } from 'reassure';
import { SlowList } from './SlowList';
const AsyncComponent = () => {
@@ -42,5 +42,5 @@ test('Async Component', async () => {
await screen.findByText('Count: 5');
};
- await measurePerformance(<AsyncComponent />, { scenario });
+ await measureRenders(<AsyncComponent />, { scenario });
});