// Generated by dts-bundle-generator v9.5.1

/**
 * Minimal `EventEmitter` interface that is molded against the Node.js
 * `EventEmitter` interface.
 */
export declare class EventEmitter<EventTypes extends EventEmitter.ValidEventTypes = string | symbol, Context = any> {
	static prefixed: string | boolean;
	/**
	 * Return an array listing the events for which the emitter has registered
	 * listeners.
	 */
	eventNames(): Array<EventEmitter.EventNames<EventTypes>>;
	/**
	 * Return the listeners registered for a given event.
	 */
	listeners<T extends EventEmitter.EventNames<EventTypes>>(event: T): Array<EventEmitter.EventListener<EventTypes, T>>;
	/**
	 * Return the number of listeners listening to a given event.
	 */
	listenerCount(event: EventEmitter.EventNames<EventTypes>): number;
	/**
	 * Calls each of the listeners registered for a given event.
	 */
	emit<T extends EventEmitter.EventNames<EventTypes>>(event: T, ...args: EventEmitter.EventArgs<EventTypes, T>): boolean;
	/**
	 * Add a listener for a given event.
	 */
	on<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
	addListener<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
	/**
	 * Add a one-time listener for a given event.
	 */
	once<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn: EventEmitter.EventListener<EventTypes, T>, context?: Context): this;
	/**
	 * Remove the listeners of a given event.
	 */
	removeListener<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn?: EventEmitter.EventListener<EventTypes, T>, context?: Context, once?: boolean): this;
	off<T extends EventEmitter.EventNames<EventTypes>>(event: T, fn?: EventEmitter.EventListener<EventTypes, T>, context?: Context, once?: boolean): this;
	/**
	 * Remove all listeners, or those of the specified event.
	 */
	removeAllListeners(event?: EventEmitter.EventNames<EventTypes>): this;
}
export declare namespace EventEmitter {
	export interface ListenerFn<Args extends any[] = any[]> {
		(...args: Args): void;
	}
	export interface EventEmitterStatic {
		new <EventTypes extends ValidEventTypes = string | symbol, Context = any>(): EventEmitter<EventTypes, Context>;
	}
	/**
	 * `object` should be in either of the following forms:
	 * ```
	 * interface EventTypes {
	 *   'event-with-parameters': any[]
	 *   'event-with-example-handler': (...args: any[]) => void
	 * }
	 * ```
	 */
	export type ValidEventTypes = string | symbol | object;
	export type EventNames<T extends ValidEventTypes> = T extends string | symbol ? T : keyof T;
	export type ArgumentMap<T extends object> = {
		[K in keyof T]: T[K] extends (...args: any[]) => void ? Parameters<T[K]> : T[K] extends any[] ? T[K] : any[];
	};
	export type EventListener<T extends ValidEventTypes, K extends EventNames<T>> = T extends string | symbol ? (...args: any[]) => void : (...args: ArgumentMap<Exclude<T, string | symbol>>[Extract<K, keyof T>]) => void;
	export type EventArgs<T extends ValidEventTypes, K extends EventNames<T>> = Parameters<EventListener<T, K>>;
	export const EventEmitter: EventEmitterStatic;
}
/** A color with normalized red, green and blue components. */
declare type RgbColor = {
	r: number;
	g: number;
	b: number;
};
/** A color expressed as hue, saturation and lightness. */
declare type HslColor = {
	h: number;
	s: number;
	l: number;
};
/** A color expressed as hue, saturation and value. */
declare type HsvColor = {
	h: number;
	s: number;
	v: number;
};
/** Extends a color object type with an alpha channel. */
declare type WithAlpha<O> = O & {
	a: number;
};
declare type RgbaColor = WithAlpha<RgbColor>;
declare type HslaColor = WithAlpha<HslColor>;
declare type HsvaColor = WithAlpha<HsvColor>;
/**
 * Array of RGBA color components, where each component is a number between 0 and 1.
 * The array must contain exactly 4 numbers in the order: red, green, blue, alpha.
 * @example
 * ```ts
 * // Full white (opaque)
 * const white: RgbaArray = [1, 1, 1, 1];
 *
 * // Semi-transparent red
 * const transparentRed: RgbaArray = [1, 0, 0, 0.5];
 * ```
 * @remarks
 * - All components must be between 0 and 1
 * - Array must contain exactly 4 values
 * - Order is [red, green, blue, alpha]
 * @see {@link Color} For the main color utility class
 * @category color
 * @standard
 */
export type RgbaArray = [
	number,
	number,
	number,
	number
];
/**
 * Valid color formats supported by PixiJS. These types extend from [colord](https://www.npmjs.com/package/colord)
 * with additional PixiJS-specific formats.
* * Common Formats: * ```ts * // CSS Color Names * new Color('red'); * new Color('blue'); * new Color('green'); * * // Hex Values * new Color(0xff0000); // RGB integer * new Color('#ff0000'); // 6-digit hex * new Color('#f00'); // 3-digit hex * new Color('#ff0000ff'); // 8-digit hex (with alpha) * new Color('#f00f'); // 4-digit hex (with alpha) * * // RGB/RGBA Objects * new Color({ r: 255, g: 0, b: 0 }); * new Color({ r: 255, g: 0, b: 0, a: 0.5 }); * * // RGB/RGBA Strings * new Color('rgb(255, 0, 0)'); * new Color('rgba(255, 0, 0, 0.5)'); * new Color('rgb(100% 0% 0%)'); * new Color('rgba(100% 0% 0% / 50%)'); * * // Arrays (normalized 0-1) * new Color([1, 0, 0]); // RGB * new Color([1, 0, 0, 0.5]); // RGBA * new Color(new Float32Array([1, 0, 0, 0.5])); * * // Arrays (0-255) * new Color(new Uint8Array([255, 0, 0])); * new Color(new Uint8ClampedArray([255, 0, 0, 128])); * * // HSL/HSLA * new Color({ h: 0, s: 100, l: 50 }); * new Color({ h: 0, s: 100, l: 50, a: 0.5 }); * new Color('hsl(0, 100%, 50%)'); * new Color('hsla(0deg 100% 50% / 50%)'); * * // HSV/HSVA * new Color({ h: 0, s: 100, v: 100 }); * new Color({ h: 0, s: 100, v: 100, a: 0.5 }); * ``` * @remarks * - All color values are normalized internally to 0-1 range * - Alpha is always between 0-1 * - Invalid colors will throw an error * - Original format is preserved when possible * @see {@link Color} For the main color utility class * @see {@link https://www.w3.org/TR/css-color-4} CSS Color Level 4 Specification * @since 7.2.0 * @category color * @standard */ export type ColorSource = string | number | number[] | Float32Array | Uint8Array | Uint8ClampedArray | HslColor | HslaColor | HsvColor | HsvaColor | RgbColor | RgbaColor | Color | number; /** * Color utility class for managing colors in various formats. Provides a unified way to work * with colors across your PixiJS application. * * Features: * - Accepts multiple color formats (hex, RGB, HSL, etc.) 
* - Automatic format conversion * - Color manipulation methods * - Component access (r,g,b,a) * - Chainable operations * @example * ```js * import { Color } from 'pixi.js'; * * new Color('red').toArray(); // [1, 0, 0, 1] * new Color(0xff0000).toArray(); // [1, 0, 0, 1] * new Color('ff0000').toArray(); // [1, 0, 0, 1] * new Color('#f00').toArray(); // [1, 0, 0, 1] * new Color('0xff0000ff').toArray(); // [1, 0, 0, 1] * new Color('#f00f').toArray(); // [1, 0, 0, 1] * new Color({ r: 255, g: 0, b: 0, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * new Color('rgb(255, 0, 0, 0.5)').toArray(); // [1, 0, 0, 0.5] * new Color([1, 1, 1]).toArray(); // [1, 1, 1, 1] * new Color([1, 0, 0, 0.5]).toArray(); // [1, 0, 0, 0.5] * new Color(new Float32Array([1, 0, 0, 0.5])).toArray(); // [1, 0, 0, 0.5] * new Color(new Uint8Array([255, 0, 0, 255])).toArray(); // [1, 0, 0, 1] * new Color(new Uint8ClampedArray([255, 0, 0, 255])).toArray(); // [1, 0, 0, 1] * new Color({ h: 0, s: 100, l: 50, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * new Color('hsl(0, 100%, 50%, 50%)').toArray(); // [1, 0, 0, 0.5] * new Color({ h: 0, s: 100, v: 100, a: 0.5 }).toArray(); // [1, 0, 0, 0.5] * * // Convert between formats * const color = new Color('red'); * color.toHex(); // "#ff0000" * color.toRgbString(); // "rgb(255,0,0,1)" * color.toNumber(); // 0xff0000 * * // Access components * color.red; // 1 * color.green; // 0 * color.blue; // 0 * color.alpha; // 1 * * // Chain operations * color * .setAlpha(0.5) * .multiply([0.5, 0.5, 0.5]) * .premultiply(0.8); * ``` * @remarks * The Color class automatically normalizes all color values internally: * - RGB components are stored as floats between 0-1 * - Alpha is always between 0-1 * - Color operations clamp values to valid ranges * - Original input format is preserved when possible * @since 7.2.0 * @category color * @standard */ export declare class Color { /** * Static shared Color instance used for utility operations. 
This is a singleton color object * that can be reused to avoid creating unnecessary Color instances. * > [!IMPORTANT] You should be careful when using this shared instance, as it is mutable and can be * > changed by any code that uses it. * > * > It is best used for one-off color operations or temporary transformations. * > For persistent colors, create your own Color instance instead. * @example * ```ts * import { Color } from 'pixi.js'; * * // Use shared instance for one-off color operations * Color.shared.setValue(0xff0000); * const redHex = Color.shared.toHex(); // "#ff0000" * const redRgb = Color.shared.toRgbArray(); // [1, 0, 0] * * // Temporary color transformations * const colorNumber = Color.shared * .setValue('#ff0000') // Set to red * .setAlpha(0.5) // Make semi-transparent * .premultiply(0.8) // Apply premultiplication * .toNumber(); // Convert to number * * // Chain multiple operations * const result = Color.shared * .setValue(someColor) * .multiply(tintColor) * .toPremultiplied(alpha); * ``` * @remarks * - This is a shared instance - be careful about multiple code paths using it simultaneously * - Use for temporary color operations to avoid allocating new Color instances * - The value is preserved between operations, so reset if needed * - For persistent colors, create your own Color instance instead */ static readonly shared: Color; /** * Temporary Color object for static uses internally. * As to not conflict with Color.shared. * @ignore */ private static readonly _temp; /** Pattern for hex strings */ private static readonly HEX_PATTERN; /** Internal color source, from constructor or set value */ private _value; /** Normalized rgba component, floats from 0-1 */ private _components; /** Cache color as number */ private _int; /** An array of the current Color. Only populated when `toArray` functions are called */ private _arrayRgba; private _arrayRgb; /** * @param {ColorSource} value - Optional value to use, if not provided, white is used. 
*/ constructor(value?: ColorSource); /** * Get the red component of the color, normalized between 0 and 1. * @example * ```ts * const color = new Color('red'); * console.log(color.red); // 1 * * const green = new Color('#00ff00'); * console.log(green.red); // 0 * ``` */ get red(): number; /** * Get the green component of the color, normalized between 0 and 1. * @example * ```ts * const color = new Color('lime'); * console.log(color.green); // 1 * * const red = new Color('#ff0000'); * console.log(red.green); // 0 * ``` */ get green(): number; /** * Get the blue component of the color, normalized between 0 and 1. * @example * ```ts * const color = new Color('blue'); * console.log(color.blue); // 1 * * const yellow = new Color('#ffff00'); * console.log(yellow.blue); // 0 * ``` */ get blue(): number; /** * Get the alpha component of the color, normalized between 0 and 1. * @example * ```ts * const color = new Color('red'); * console.log(color.alpha); // 1 (fully opaque) * * const transparent = new Color('rgba(255, 0, 0, 0.5)'); * console.log(transparent.alpha); // 0.5 (semi-transparent) * ``` */ get alpha(): number; /** * Sets the color value and returns the instance for chaining. * * This is a chainable version of setting the `value` property. * @param value - The color to set. 
Accepts various formats: * - Hex strings/numbers (e.g., '#ff0000', 0xff0000) * - RGB/RGBA values (arrays, objects) * - CSS color names * - HSL/HSLA values * - HSV/HSVA values * @returns The Color instance for chaining * @example * ```ts * // Basic usage * const color = new Color(); * color.setValue('#ff0000') * .setAlpha(0.5) * .premultiply(0.8); * * // Different formats * color.setValue(0xff0000); // Hex number * color.setValue('#ff0000'); // Hex string * color.setValue([1, 0, 0]); // RGB array * color.setValue([1, 0, 0, 0.5]); // RGBA array * color.setValue({ r: 1, g: 0, b: 0 }); // RGB object * * // Copy from another color * const red = new Color('red'); * color.setValue(red); * ``` * @throws {Error} If the color value is invalid or null * @see {@link Color.value} For the underlying value property */ setValue(value: ColorSource): this; /** * The current color source. This property allows getting and setting the color value * while preserving the original format where possible. * @remarks * When setting: * - Setting to a `Color` instance copies its source and components * - Setting to other valid sources normalizes and stores the value * - Setting to `null` throws an Error * - The color remains unchanged if normalization fails * * When getting: * - Returns `null` if color was modified by {@link Color.multiply} or {@link Color.premultiply} * - Otherwise returns the original color source * @example * ```ts * // Setting different color formats * const color = new Color(); * * color.value = 0xff0000; // Hex number * color.value = '#ff0000'; // Hex string * color.value = [1, 0, 0]; // RGB array * color.value = [1, 0, 0, 0.5]; // RGBA array * color.value = { r: 1, g: 0, b: 0 }; // RGB object * * // Copying from another color * const red = new Color('red'); * color.value = red; // Copies red's components * * // Getting the value * console.log(color.value); // Returns original format * * // After modifications * color.multiply([0.5, 0.5, 0.5]); * 
console.log(color.value); // Returns null * ``` * @throws {Error} When attempting to set `null` */ set value(value: ColorSource | null); get value(): Exclude | null; /** * Copy a color source internally. * @param value - Color source */ private _cloneSource; /** * Equality check for color sources. * @param value1 - First color source * @param value2 - Second color source * @returns `true` if the color sources are equal, `false` otherwise. */ private _isSourceEqual; /** * Convert to a RGBA color object with normalized components (0-1). * @example * ```ts * import { Color } from 'pixi.js'; * * // Convert colors to RGBA objects * new Color('white').toRgba(); // returns { r: 1, g: 1, b: 1, a: 1 } * new Color('#ff0000').toRgba(); // returns { r: 1, g: 0, b: 0, a: 1 } * * // With transparency * new Color('rgba(255,0,0,0.5)').toRgba(); // returns { r: 1, g: 0, b: 0, a: 0.5 } * ``` * @returns An RGBA object with normalized components */ toRgba(): RgbaColor; /** * Convert to a RGB color object with normalized components (0-1). * * Alpha component is omitted in the output. * @example * ```ts * import { Color } from 'pixi.js'; * * // Convert colors to RGB objects * new Color('white').toRgb(); // returns { r: 1, g: 1, b: 1 } * new Color('#ff0000').toRgb(); // returns { r: 1, g: 0, b: 0 } * * // Alpha is ignored * new Color('rgba(255,0,0,0.5)').toRgb(); // returns { r: 1, g: 0, b: 0 } * ``` * @returns An RGB object with normalized components */ toRgb(): RgbColor; /** * Convert to a CSS-style rgba string representation. * * RGB components are scaled to 0-255 range, alpha remains 0-1. 
* @example * ```ts * import { Color } from 'pixi.js'; * * // Convert colors to RGBA strings * new Color('white').toRgbaString(); // returns "rgba(255,255,255,1)" * new Color('#ff0000').toRgbaString(); // returns "rgba(255,0,0,1)" * * // With transparency * new Color([1, 0, 0, 0.5]).toRgbaString(); // returns "rgba(255,0,0,0.5)" * ``` * @returns A CSS-compatible rgba string */ toRgbaString(): string; /** * Convert to an [R, G, B] array of clamped uint8 values (0 to 255). * @param {number[]|Uint8Array|Uint8ClampedArray} [out] - Optional output array. If not provided, * a cached array will be used and returned. * @returns Array containing RGB components as integers between 0-255 * @example * ```ts * // Basic usage * new Color('white').toUint8RgbArray(); // returns [255, 255, 255] * new Color('#ff0000').toUint8RgbArray(); // returns [255, 0, 0] * * // Using custom output array * const rgb = new Uint8Array(3); * new Color('blue').toUint8RgbArray(rgb); // rgb is now [0, 0, 255] * * // Using different array types * new Color('red').toUint8RgbArray(new Uint8ClampedArray(3)); // [255, 0, 0] * new Color('red').toUint8RgbArray([]); // [255, 0, 0] * ``` * @remarks * - Output values are always clamped between 0-255 * - Alpha component is not included in output * - Reuses internal cache array if no output array provided */ toUint8RgbArray(out?: T): T; /** * Convert to an [R, G, B, A] array of normalized floats (numbers from 0.0 to 1.0). * @param {number[]|Float32Array} [out] - Optional output array. If not provided, * a cached array will be used and returned. 
* @returns Array containing RGBA components as floats between 0-1 * @example * ```ts * // Basic usage * new Color('white').toArray(); // returns [1, 1, 1, 1] * new Color('red').toArray(); // returns [1, 0, 0, 1] * * // With alpha * new Color('rgba(255,0,0,0.5)').toArray(); // returns [1, 0, 0, 0.5] * * // Using custom output array * const rgba = new Float32Array(4); * new Color('blue').toArray(rgba); // rgba is now [0, 0, 1, 1] * ``` * @remarks * - Output values are normalized between 0-1 * - Includes alpha component as the fourth value * - Reuses internal cache array if no output array provided */ toArray(out?: T): T; /** * Convert to an [R, G, B] array of normalized floats (numbers from 0.0 to 1.0). * @param {number[]|Float32Array} [out] - Optional output array. If not provided, * a cached array will be used and returned. * @returns Array containing RGB components as floats between 0-1 * @example * ```ts * // Basic usage * new Color('white').toRgbArray(); // returns [1, 1, 1] * new Color('red').toRgbArray(); // returns [1, 0, 0] * * // Using custom output array * const rgb = new Float32Array(3); * new Color('blue').toRgbArray(rgb); // rgb is now [0, 0, 1] * ``` * @remarks * - Output values are normalized between 0-1 * - Alpha component is omitted from output * - Reuses internal cache array if no output array provided */ toRgbArray(out?: T): T; /** * Convert to a hexadecimal number. * @returns The color as a 24-bit RGB integer * @example * ```ts * // Basic usage * new Color('white').toNumber(); // returns 0xffffff * new Color('red').toNumber(); // returns 0xff0000 * * // Store as hex * const color = new Color('blue'); * const hex = color.toNumber(); // 0x0000ff * ``` */ toNumber(): number; /** * Convert to a BGR number. * * Useful for platforms that expect colors in BGR format. 
* @returns The color as a 24-bit BGR integer * @example * ```ts * // Convert RGB to BGR * new Color(0xffcc99).toBgrNumber(); // returns 0x99ccff * * // Common use case: platform-specific color format * const color = new Color('orange'); * const bgrColor = color.toBgrNumber(); // Color with swapped R/B channels * ``` * @remarks * This swaps the red and blue channels compared to the normal RGB format: * - RGB 0xRRGGBB becomes BGR 0xBBGGRR */ toBgrNumber(): number; /** * Convert to a hexadecimal number in little endian format (e.g., BBGGRR). * * Useful for platforms that expect colors in little endian byte order. * @example * ```ts * import { Color } from 'pixi.js'; * * // Convert RGB color to little endian format * new Color(0xffcc99).toLittleEndianNumber(); // returns 0x99ccff * * // Common use cases: * const color = new Color('orange'); * const leColor = color.toLittleEndianNumber(); // Swaps byte order for LE systems * * // Multiple conversions * const colors = { * normal: 0xffcc99, * littleEndian: new Color(0xffcc99).toLittleEndianNumber(), // 0x99ccff * backToNormal: new Color(0x99ccff).toLittleEndianNumber() // 0xffcc99 * }; * ``` * @remarks * - Swaps R and B channels in the color value * - RGB 0xRRGGBB becomes 0xBBGGRR * - Useful for systems that use little endian byte order * - Can be used to convert back and forth between formats * @returns The color as a number in little endian format (BBGGRR) * @see {@link Color.toBgrNumber} For BGR format without byte swapping */ toLittleEndianNumber(): number; /** * Multiply with another color. * * This action is destructive and modifies the original color. * @param {ColorSource} value - The color to multiply by. 
Accepts any valid color format: * - Hex strings/numbers (e.g., '#ff0000', 0xff0000) * - RGB/RGBA arrays ([1, 0, 0], [1, 0, 0, 1]) * - Color objects ({ r: 1, g: 0, b: 0 }) * - CSS color names ('red', 'blue') * @returns this - The Color instance for chaining * @example * ```ts * // Basic multiplication * const color = new Color('#ff0000'); * color.multiply(0x808080); // 50% darker red * * // With transparency * color.multiply([1, 1, 1, 0.5]); // 50% transparent * * // Chain operations * color * .multiply('#808080') * .multiply({ r: 1, g: 1, b: 1, a: 0.5 }); * ``` * @remarks * - Multiplies each RGB component and alpha separately * - Values are clamped between 0-1 * - Original color format is lost (value becomes null) * - Operation cannot be undone */ multiply(value: ColorSource): this; /** * Converts color to a premultiplied alpha format. * * This action is destructive and modifies the original color. * @param alpha - The alpha value to multiply by (0-1) * @param {boolean} [applyToRGB=true] - Whether to premultiply RGB channels * @returns {Color} The Color instance for chaining * @example * ```ts * // Basic premultiplication * const color = new Color('red'); * color.premultiply(0.5); // 50% transparent red with premultiplied RGB * * // Alpha only (RGB unchanged) * color.premultiply(0.5, false); // 50% transparent, original RGB * * // Chain with other operations * color * .multiply(0x808080) * .premultiply(0.5) * .toNumber(); * ``` * @remarks * - RGB channels are multiplied by alpha when applyToRGB is true * - Alpha is always set to the provided value * - Values are clamped between 0-1 * - Original color format is lost (value becomes null) * - Operation cannot be undone */ premultiply(alpha: number, applyToRGB?: boolean): this; /** * Returns the color as a 32-bit premultiplied alpha integer. 
* * Format: 0xAARRGGBB * @param {number} alpha - The alpha value to multiply by (0-1) * @param {boolean} [applyToRGB=true] - Whether to premultiply RGB channels * @returns {number} The premultiplied color as a 32-bit integer * @example * ```ts * // Convert to premultiplied format * const color = new Color('red'); * * // Full opacity (0xFFRRGGBB) * color.toPremultiplied(1.0); // 0xFFFF0000 * * // 50% transparency with premultiplied RGB * color.toPremultiplied(0.5); // 0x7F7F0000 * * // 50% transparency without RGB premultiplication * color.toPremultiplied(0.5, false); // 0x7FFF0000 * ``` * @remarks * - Returns full opacity (0xFF000000) when alpha is 1.0 * - Returns 0 when alpha is 0.0 and applyToRGB is true * - RGB values are rounded during premultiplication */ toPremultiplied(alpha: number, applyToRGB?: boolean): number; /** * Convert to a hexadecimal string (6 characters). * @returns A CSS-compatible hex color string (e.g., "#ff0000") * @example * ```ts * import { Color } from 'pixi.js'; * * // Basic colors * new Color('red').toHex(); // returns "#ff0000" * new Color('white').toHex(); // returns "#ffffff" * new Color('black').toHex(); // returns "#000000" * * // From different formats * new Color(0xff0000).toHex(); // returns "#ff0000" * new Color([1, 0, 0]).toHex(); // returns "#ff0000" * new Color({ r: 1, g: 0, b: 0 }).toHex(); // returns "#ff0000" * ``` * @remarks * - Always returns a 6-character hex string * - Includes leading "#" character * - Alpha channel is ignored * - Values are rounded to nearest hex value */ toHex(): string; /** * Convert to a hexadecimal string with alpha (8 characters). 
* @returns A CSS-compatible hex color string with alpha (e.g., "#ff0000ff") * @example * ```ts * import { Color } from 'pixi.js'; * * // Fully opaque colors * new Color('red').toHexa(); // returns "#ff0000ff" * new Color('white').toHexa(); // returns "#ffffffff" * * // With transparency * new Color('rgba(255, 0, 0, 0.5)').toHexa(); // returns "#ff00007f" * new Color([1, 0, 0, 0]).toHexa(); // returns "#ff000000" * ``` * @remarks * - Returns an 8-character hex string * - Includes leading "#" character * - Alpha is encoded in last two characters * - Values are rounded to nearest hex value */ toHexa(): string; /** * Set alpha (transparency) value while preserving color components. * * Provides a chainable interface for setting alpha. * @param alpha - Alpha value between 0 (fully transparent) and 1 (fully opaque) * @returns The Color instance for chaining * @example * ```ts * // Basic alpha setting * const color = new Color('red'); * color.setAlpha(0.5); // 50% transparent red * * // Chain with other operations * color * .setValue('#ff0000') * .setAlpha(0.8) // 80% opaque * .premultiply(0.5); // Further modify alpha * * // Reset to fully opaque * color.setAlpha(1); * ``` * @remarks * - Alpha value is clamped between 0-1 * - Can be chained with other color operations */ setAlpha(alpha: number): this; /** * Normalize the input value into rgba * @param value - Input value */ private _normalize; /** Refresh the internal color rgb number */ private _refreshInt; /** * Clamps values to a range. Will override original values * @param value - Value(s) to clamp * @param min - Minimum value * @param max - Maximum value */ private _clamp; /** * Check if a value can be interpreted as a valid color format. * Supports all color formats that can be used with the Color class. 
* @param value - Value to check * @returns True if the value can be used as a color * @example * ```ts * import { Color } from 'pixi.js'; * * // CSS colors and hex values * Color.isColorLike('red'); // true * Color.isColorLike('#ff0000'); // true * Color.isColorLike(0xff0000); // true * * // Arrays (RGB/RGBA) * Color.isColorLike([1, 0, 0]); // true * Color.isColorLike([1, 0, 0, 0.5]); // true * * // TypedArrays * Color.isColorLike(new Float32Array([1, 0, 0])); // true * Color.isColorLike(new Uint8Array([255, 0, 0])); // true * Color.isColorLike(new Uint8ClampedArray([255, 0, 0])); // true * * // Object formats * Color.isColorLike({ r: 1, g: 0, b: 0 }); // true (RGB) * Color.isColorLike({ r: 1, g: 0, b: 0, a: 0.5 }); // true (RGBA) * Color.isColorLike({ h: 0, s: 100, l: 50 }); // true (HSL) * Color.isColorLike({ h: 0, s: 100, l: 50, a: 0.5 }); // true (HSLA) * Color.isColorLike({ h: 0, s: 100, v: 100 }); // true (HSV) * Color.isColorLike({ h: 0, s: 100, v: 100, a: 0.5 });// true (HSVA) * * // Color instances * Color.isColorLike(new Color('red')); // true * * // Invalid values * Color.isColorLike(null); // false * Color.isColorLike(undefined); // false * Color.isColorLike({}); // false * Color.isColorLike([]); // false * Color.isColorLike('not-a-color'); // false * ``` * @remarks * Checks for the following formats: * - Numbers (0x000000 to 0xffffff) * - CSS color strings * - RGB/RGBA arrays and objects * - HSL/HSLA objects * - HSV/HSVA objects * - TypedArrays (Float32Array, Uint8Array, Uint8ClampedArray) * - Color instances * @see {@link ColorSource} For supported color format types * @see {@link Color.setValue} For setting color values * @category utility */ static isColorLike(value: unknown): value is ColorSource; } /** * Common interface for points. Both Point and ObservablePoint implement it. * Provides a standard way to represent 2D coordinates. 
* * Many PixiJS methods accept PointData for transformations, * making it easy to work with different point types interchangeably. * @example * ```ts * // Create an object implementing PointData * const point: PointData = { x: 100, y: 200 }; * * // Use with matrix transformations * const matrix = new Matrix(); * matrix.translate(50, 50).apply(point); * * // Mix with other point types * const observablePoint = new ObservablePoint(() => {}, null, 0, 0); * const regularPoint = new Point(0, 0); * // All are PointData compatible * ``` * @remarks * - Basic x,y coordinate interface * - Used by Point and ObservablePoint * @see {@link Point} For standard point implementation * @see {@link ObservablePoint} For observable point implementation * @category maths * @standard */ export interface PointData { /** X coordinate */ x: number; /** Y coordinate */ y: number; } /** * Common interface for points with manipulation methods. * * Extends PointData to add operations for copying, comparison and setting values. * @example * ```ts * // Basic point manipulation * const point: PointLike = new Point(10, 20); * point.set(30, 40); * * // Copy between points * const other = new Point(); * point.copyTo(other); * * // Compare points * const same = point.equals(other); // true * ``` * @see {@link PointData} For basic x,y interface * @see {@link Point} For standard implementation * @see {@link ObservablePoint} For observable implementation * @category maths * @standard */ export interface PointLike extends PointData { /** * Copies x and y from the given point * @param {PointData} p - The point to copy from * @returns {this} Returns itself. * @example * ```ts * const point1: PointLike = new Point(10, 20); * const point2: PointLike = new Point(30, 40); * point1.copyFrom(point2); * console.log(point1.x, point1.y); // 30, 40 * ``` */ copyFrom: (p: PointData) => this; /** * Copies x and y into the given point * @param {PointLike} p - The point to copy. 
* @returns {PointLike} Given point with values updated * @example * ```ts * const point1: PointLike = new Point(10, 20); * const point2: PointLike = new Point(0, 0); * point1.copyTo(point2); * console.log(point2.x, point2.y); // 10, 20 * ``` */ copyTo: (p: T) => T; /** * Returns true if the given point is equal to this point * @param {PointData} p - The point to check * @returns {boolean} Whether the given point equal to this point * @example * ```ts * const point1: PointLike = new Point(10, 20); * const point2: PointLike = new Point(10, 20); * const point3: PointLike = new Point(30, 40); * console.log(point1.equals(point2)); // true * console.log(point1.equals(point3)); // false * ``` */ equals: (p: PointData) => boolean; /** * Sets the point to a new x and y position. * If y is omitted, both x and y will be set to x. * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=x] - position of the point on the y axis * @example * ```ts * const point: PointLike = new Point(10, 20); * point.set(30, 40); * console.log(point.x, point.y); // 30, 40 * point.set(50); // Sets both x and y to 50 * console.log(point.x, point.y); // 50, 50 * ``` */ set: (x?: number, y?: number) => void; } export interface Point extends PixiMixins.Point { } /** * The Point object represents a location in a two-dimensional coordinate system, where `x` represents * the position on the horizontal axis and `y` represents the position on the vertical axis. * * Many Pixi functions accept the `PointData` type as an alternative to `Point`, * which only requires `x` and `y` properties. 
* @example * ```ts * // Basic point creation * const point = new Point(100, 200); * * // Using with transformations * const matrix = new Matrix(); * matrix.translate(50, 50).apply(point); * * // Point arithmetic * const start = new Point(0, 0); * const end = new Point(100, 100); * const middle = new Point( * (start.x + end.x) / 2, * (start.y + end.y) / 2 * ); * ``` * @see {@link PointData} For basic x,y interface * @see {@link PointLike} For point manipulation interface * @see {@link ObservablePoint} For observable version * @category maths * @standard */ export declare class Point implements PointLike { /** * Position of the point on the x axis * @example * ```ts * // Set x position * const point = new Point(); * point.x = 100; * * // Use in calculations * const width = rightPoint.x - leftPoint.x; * ``` */ x: number; /** * Position of the point on the y axis * @example * ```ts * // Set y position * const point = new Point(); * point.y = 200; * * // Use in calculations * const height = bottomPoint.y - topPoint.y; * ``` */ y: number; /** * Creates a new `Point` * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=0] - position of the point on the y axis */ constructor(x?: number, y?: number); /** * Creates a clone of this point, which is a new instance with the same `x` and `y` values. 
* @example * ```ts * // Basic point cloning * const original = new Point(100, 200); * const copy = original.clone(); * * // Clone and modify * const modified = original.clone(); * modified.set(300, 400); * * // Verify independence * console.log(original); // Point(100, 200) * console.log(modified); // Point(300, 400) * ``` * @remarks * - Creates new Point instance * - Deep copies x and y values * - Independent from original * - Useful for preserving values * @returns A clone of this point * @see {@link Point.copyFrom} For copying into existing point * @see {@link Point.copyTo} For copying to existing point */ clone(): Point; /** * Copies x and y from the given point into this point. * @example * ```ts * // Basic copying * const source = new Point(100, 200); * const target = new Point(); * target.copyFrom(source); * * // Copy and chain operations * const point = new Point() * .copyFrom(source) * .set(x + 50, y + 50); * * // Copy from any PointData * const data = { x: 10, y: 20 }; * point.copyFrom(data); * ``` * @param p - The point to copy from * @returns The point instance itself * @see {@link Point.copyTo} For copying to another point * @see {@link Point.clone} For creating new point copy */ copyFrom(p: PointData): this; /** * Copies this point's x and y into the given point. * @example * ```ts * // Basic copying * const source = new Point(100, 200); * const target = new Point(); * source.copyTo(target); * ``` * @param p - The point to copy to. Can be any type that is or extends `PointLike` * @returns The point (`p`) with values updated * @see {@link Point.copyFrom} For copying from another point * @see {@link Point.clone} For creating new point copy */ copyTo(p: T): T; /** * Checks if another point is equal to this point. * * Compares x and y values using strict equality. 
* @example * ```ts * // Basic equality check * const p1 = new Point(100, 200); * const p2 = new Point(100, 200); * console.log(p1.equals(p2)); // true * * // Compare with PointData * const data = { x: 100, y: 200 }; * console.log(p1.equals(data)); // true * * // Check different points * const p3 = new Point(200, 300); * console.log(p1.equals(p3)); // false * ``` * @param p - The point to check * @returns `true` if both `x` and `y` are equal * @see {@link Point.copyFrom} For making points equal * @see {@link PointData} For point data interface */ equals(p: PointData): boolean; /** * Sets the point to a new x and y position. * * If y is omitted, both x and y will be set to x. * @example * ```ts * // Basic position setting * const point = new Point(); * point.set(100, 200); * * // Set both x and y to same value * point.set(50); // x=50, y=50 * * // Chain with other operations * point * .set(10, 20) * .copyTo(otherPoint); * ``` * @param x - Position on the x axis * @param y - Position on the y axis, defaults to x * @returns The point instance itself * @see {@link Point.copyFrom} For copying from another point * @see {@link Point.equals} For comparing positions */ set(x?: number, y?: number): this; toString(): string; /** * A static Point object with `x` and `y` values of `0`. * * This shared instance is reset to zero values when accessed. * * > [!IMPORTANT] This point is shared and temporary. Do not store references to it. * @example * ```ts * // Use for temporary calculations * const tempPoint = Point.shared; * tempPoint.set(100, 200); * matrix.apply(tempPoint); * * // Will be reset to (0,0) on next access * const fresh = Point.shared; // x=0, y=0 * ``` * @readonly * @returns A fresh zeroed point for temporary use * @see {@link Point.constructor} For creating new points * @see {@link PointData} For basic point interface */ static get shared(): Point; } /** * The data structure that contains the position, scale, pivot, skew and rotation of an object. 
* This is used by the {@link Matrix} class to decompose the matrix into its components. * @category maths * @advanced */ export interface TransformableObject { /** The position of the object */ position: PointData; /** The scale of the object */ scale: PointData; /** The pivot of the object */ pivot: PointData; /** The skew of the object */ skew: PointData; /** The rotation of the object */ rotation: number; } /** * A fast matrix for 2D transformations. * Represents a 3x3 transformation matrix: * * ```js * | a c tx | * | b d ty | * | 0 0 1 | * ``` * @example * ```ts * // Create identity matrix * const matrix = new Matrix(); * * // Create matrix with custom values * const transform = new Matrix(2, 0, 0, 2, 100, 100); // Scale 2x, translate 100,100 * * // Transform a point * const point = { x: 10, y: 20 }; * const transformed = transform.apply(point); * * // Chain transformations * matrix * .translate(100, 50) * .rotate(Math.PI / 4) * .scale(2, 2); * ``` * @remarks * - Used for transform hierarchies * - Supports scale, rotation, position * - Can be concatenated with append/prepend * - Efficient for batched transformations * @category maths * @standard */ export declare class Matrix { /** * Scale on the x axis. * @default 1 */ a: number; /** * Shear on the y axis. * @default 0 */ b: number; /** * Shear on the x axis. * @default 0 */ c: number; /** * Scale on the y axis. * @default 1 */ d: number; /** * Translation on the x axis. * @default 0 */ tx: number; /** * Translation on the y axis. * @default 0 */ ty: number; /** * Array representation of the matrix. * Only populated when `toArray()` is called. 
* @default null * @see {@link Matrix.toArray} For filling this array */ array: Float32Array | null; /** * @param a - x scale * @param b - y skew * @param c - x skew * @param d - y scale * @param tx - x translation * @param ty - y translation */ constructor(a?: number, b?: number, c?: number, d?: number, tx?: number, ty?: number); /** * Creates a Matrix object based on the given array. * Populates matrix components from a flat array in column-major order. * * > [!NOTE] Array mapping order: * > ``` * > array[0] = a (x scale) * > array[1] = b (y skew) * > array[2] = tx (x translation) * > array[3] = c (x skew) * > array[4] = d (y scale) * > array[5] = ty (y translation) * > ``` * @example * ```ts * // Create matrix from array * const matrix = new Matrix(); * matrix.fromArray([ * 2, 0, 100, // a, b, tx * 0, 2, 100 // c, d, ty * ]); * * // Create matrix from typed array * const float32Array = new Float32Array([ * 1, 0, 0, // Scale x1, no skew * 0, 1, 0 // No skew, scale x1 * ]); * matrix.fromArray(float32Array); * ``` * @param array - The array to populate the matrix from * @see {@link Matrix.toArray} For converting matrix to array * @see {@link Matrix.set} For setting values directly */ fromArray(array: number[]): void; /** * Sets the matrix properties directly. * All matrix components can be set in one call. * @example * ```ts * // Set to identity matrix * matrix.set(1, 0, 0, 1, 0, 0); * * // Set to scale matrix * matrix.set(2, 0, 0, 2, 0, 0); // Scale 2x * * // Set to translation matrix * matrix.set(1, 0, 0, 1, 100, 50); // Move 100,50 * ``` * @param a - Scale on x axis * @param b - Shear on y axis * @param c - Shear on x axis * @param d - Scale on y axis * @param tx - Translation on x axis * @param ty - Translation on y axis * @returns This matrix. Good for chaining method calls. 
* @see {@link Matrix.identity} For resetting to identity * @see {@link Matrix.fromArray} For setting from array */ set(a: number, b: number, c: number, d: number, tx: number, ty: number): this; /** * Creates an array from the current Matrix object. * * > [!NOTE] The array format is: * > ``` * > Non-transposed: * > [a, c, tx, * > b, d, ty, * > 0, 0, 1] * > * > Transposed: * > [a, b, 0, * > c, d, 0, * > tx,ty,1] * > ``` * @example * ```ts * // Basic array conversion * const matrix = new Matrix(2, 0, 0, 2, 100, 100); * const array = matrix.toArray(); * * // Using existing array * const float32Array = new Float32Array(9); * matrix.toArray(false, float32Array); * * // Get transposed array * const transposed = matrix.toArray(true); * ``` * @param transpose - Whether to transpose the matrix * @param out - Optional Float32Array to store the result * @returns The array containing the matrix values * @see {@link Matrix.fromArray} For creating matrix from array * @see {@link Matrix.array} For cached array storage */ toArray(transpose?: boolean, out?: Float32Array): Float32Array; /** * Get a new position with the current transformation applied. * * Can be used to go from a child's coordinate space to the world coordinate space. (e.g. rendering) * @example * ```ts * // Basic point transformation * const matrix = new Matrix().translate(100, 50).rotate(Math.PI / 4); * const point = new Point(10, 20); * const transformed = matrix.apply(point); * * // Reuse existing point * const output = new Point(); * matrix.apply(point, output); * ``` * @param pos - The origin point to transform * @param newPos - Optional point to store the result * @returns The transformed point * @see {@link Matrix.applyInverse} For inverse transformation * @see {@link Point} For point operations */ apply

(pos: PointData, newPos?: P): P; /** * Get a new position with the inverse of the current transformation applied. * * Can be used to go from the world coordinate space to a child's coordinate space. (e.g. input) * @example * ```ts * // Basic inverse transformation * const matrix = new Matrix().translate(100, 50).rotate(Math.PI / 4); * const worldPoint = new Point(150, 100); * const localPoint = matrix.applyInverse(worldPoint); * * // Reuse existing point * const output = new Point(); * matrix.applyInverse(worldPoint, output); * * // Convert mouse position to local space * const mousePoint = new Point(mouseX, mouseY); * const localMouse = matrix.applyInverse(mousePoint); * ``` * @param pos - The origin point to inverse-transform * @param newPos - Optional point to store the result * @returns The inverse-transformed point * @see {@link Matrix.apply} For forward transformation * @see {@link Matrix.invert} For getting inverse matrix */ applyInverse

(pos: PointData, newPos?: P): P; /** * Translates the matrix on the x and y axes. * Adds to the position values while preserving scale, rotation and skew. * @example * ```ts * // Basic translation * const matrix = new Matrix(); * matrix.translate(100, 50); // Move right 100, down 50 * * // Chain with other transformations * matrix * .scale(2, 2) * .translate(100, 0) * .rotate(Math.PI / 4); * ``` * @param x - How much to translate on the x axis * @param y - How much to translate on the y axis * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.set} For setting position directly * @see {@link Matrix.setTransform} For complete transform setup */ translate(x: number, y: number): this; /** * Applies a scale transformation to the matrix. * Multiplies the scale values with existing matrix components. * @example * ```ts * // Basic scaling * const matrix = new Matrix(); * matrix.scale(2, 3); // Scale 2x horizontally, 3x vertically * * // Chain with other transformations * matrix * .translate(100, 100) * .scale(2, 2) // Scales after translation * .rotate(Math.PI / 4); * ``` * @param x - The amount to scale horizontally * @param y - The amount to scale vertically * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.setTransform} For setting scale directly * @see {@link Matrix.append} For combining transformations */ scale(x: number, y: number): this; /** * Applies a rotation transformation to the matrix. * * Rotates around the origin (0,0) by the given angle in radians. 
* @example * ```ts * // Basic rotation * const matrix = new Matrix(); * matrix.rotate(Math.PI / 4); // Rotate 45 degrees * * // Chain with other transformations * matrix * .translate(100, 100) // Move to rotation center * .rotate(Math.PI) // Rotate 180 degrees * .scale(2, 2); // Scale after rotation * * // Common angles * matrix.rotate(Math.PI / 2); // 90 degrees * matrix.rotate(Math.PI); // 180 degrees * matrix.rotate(Math.PI * 2); // 360 degrees * ``` * @remarks * - Rotates around origin point (0,0) * - Affects position if translation was set * - Uses counter-clockwise rotation * - Order of operations matters when chaining * @param angle - The angle in radians * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.setTransform} For setting rotation directly * @see {@link Matrix.append} For combining transformations */ rotate(angle: number): this; /** * Appends the given Matrix to this Matrix. * Combines two matrices by multiplying them together: this = this * matrix * @example * ```ts * // Basic matrix combination * const matrix = new Matrix(); * const other = new Matrix().translate(100, 0).rotate(Math.PI / 4); * matrix.append(other); * ``` * @remarks * - Order matters: A.append(B) !== B.append(A) * - Modifies current matrix * - Preserves transformation order * - Commonly used for combining transforms * @param matrix - The matrix to append * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.prepend} For prepending transformations * @see {@link Matrix.appendFrom} For appending two external matrices */ append(matrix: Matrix): this; /** * Appends two matrices and sets the result to this matrix. 
* Performs matrix multiplication: this = A * B * @example * ```ts * // Basic matrix multiplication * const result = new Matrix(); * const matrixA = new Matrix().scale(2, 2); * const matrixB = new Matrix().rotate(Math.PI / 4); * result.appendFrom(matrixA, matrixB); * ``` * @remarks * - Order matters: A * B !== B * A * - Creates a new transformation from two others * - More efficient than append() for multiple operations * - Does not modify input matrices * @param a - The first matrix to multiply * @param b - The second matrix to multiply * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.append} For single matrix combination * @see {@link Matrix.prepend} For reverse order multiplication */ appendFrom(a: Matrix, b: Matrix): this; /** * Sets the matrix based on all the available properties. * Combines position, scale, rotation, skew and pivot in a single operation. * @example * ```ts * // Basic transform setup * const matrix = new Matrix(); * matrix.setTransform( * 100, 100, // position * 0, 0, // pivot * 2, 2, // scale * Math.PI / 4, // rotation (45 degrees) * 0, 0 // skew * ); * ``` * @remarks * - Updates all matrix components at once * - More efficient than separate transform calls * - Uses radians for rotation and skew * - Pivot affects rotation center * @param x - Position on the x axis * @param y - Position on the y axis * @param pivotX - Pivot on the x axis * @param pivotY - Pivot on the y axis * @param scaleX - Scale on the x axis * @param scaleY - Scale on the y axis * @param rotation - Rotation in radians * @param skewX - Skew on the x axis * @param skewY - Skew on the y axis * @returns This matrix. Good for chaining method calls. 
* @see {@link Matrix.decompose} For extracting transform properties * @see {@link TransformableObject} For transform data structure */ setTransform(x: number, y: number, pivotX: number, pivotY: number, scaleX: number, scaleY: number, rotation: number, skewX: number, skewY: number): this; /** * Prepends the given Matrix to this Matrix. * Combines two matrices by multiplying them together: this = matrix * this * @example * ```ts * // Basic matrix prepend * const matrix = new Matrix().scale(2, 2); * const other = new Matrix().translate(100, 0); * matrix.prepend(other); // Translation happens before scaling * ``` * @remarks * - Order matters: A.prepend(B) !== B.prepend(A) * - Modifies current matrix * - Reverses transformation order compared to append() * @param matrix - The matrix to prepend * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.append} For appending transformations * @see {@link Matrix.appendFrom} For combining external matrices */ prepend(matrix: Matrix): this; /** * Decomposes the matrix into its individual transform components. * Extracts position, scale, rotation and skew values from the matrix. 
* @example * ```ts * // Basic decomposition * const matrix = new Matrix() * .translate(100, 100) * .rotate(Math.PI / 4) * .scale(2, 2); * * const transform = { * position: new Point(), * scale: new Point(), * pivot: new Point(), * skew: new Point(), * rotation: 0 * }; * * matrix.decompose(transform); * console.log(transform.position); // Point(100, 100) * console.log(transform.rotation); // ~0.785 (PI/4) * console.log(transform.scale); // Point(2, 2) * ``` * @remarks * - Handles combined transformations * - Accounts for pivot points * - Chooses between rotation/skew based on transform type * - Uses radians for rotation and skew * @param transform - The transform object to store the decomposed values * @returns The transform with the newly applied properties * @see {@link Matrix.setTransform} For composing from components * @see {@link TransformableObject} For transform structure */ decompose(transform: TransformableObject): TransformableObject; /** * Inverts this matrix. * Creates the matrix that when multiplied with this matrix results in an identity matrix. * @example * ```ts * // Basic matrix inversion * const matrix = new Matrix() * .translate(100, 50) * .scale(2, 2); * * matrix.invert(); // Now transforms in opposite direction * * // Verify inversion * const point = new Point(50, 50); * const transformed = matrix.apply(point); * const original = matrix.invert().apply(transformed); * // original ≈ point * ``` * @remarks * - Modifies the current matrix * - Useful for reversing transformations * - Cannot invert matrices with zero determinant * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.identity} For resetting to identity * @see {@link Matrix.applyInverse} For inverse transformations */ invert(): this; /** * Checks if this matrix is an identity matrix. * * An identity matrix has no transformations applied (default state). 
* @example * ```ts * // Check if matrix is identity * const matrix = new Matrix(); * console.log(matrix.isIdentity()); // true * * // Check after transformations * matrix.translate(100, 0); * console.log(matrix.isIdentity()); // false * * // Reset and verify * matrix.identity(); * console.log(matrix.isIdentity()); // true * ``` * @remarks * - Verifies a = 1, d = 1 (no scale) * - Verifies b = 0, c = 0 (no skew) * - Verifies tx = 0, ty = 0 (no translation) * @returns True if matrix has no transformations * @see {@link Matrix.identity} For resetting to identity * @see {@link Matrix.IDENTITY} For constant identity matrix */ isIdentity(): boolean; /** * Resets this Matrix to an identity (default) matrix. * Sets all components to their default values: scale=1, no skew, no translation. * @example * ```ts * // Reset transformed matrix * const matrix = new Matrix() * .scale(2, 2) * .rotate(Math.PI / 4); * matrix.identity(); // Back to default state * * // Chain after reset * matrix * .identity() * .translate(100, 100) * .scale(2, 2); * * // Compare with identity constant * const isDefault = matrix.equals(Matrix.IDENTITY); * ``` * @remarks * - Sets a=1, d=1 (default scale) * - Sets b=0, c=0 (no skew) * - Sets tx=0, ty=0 (no translation) * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.IDENTITY} For constant identity matrix * @see {@link Matrix.isIdentity} For checking identity state */ identity(): this; /** * Creates a new Matrix object with the same values as this one. * @returns A copy of this matrix. Good for chaining method calls. */ clone(): Matrix; /** * Creates a new Matrix object with the same values as this one. 
* @param matrix * @example * ```ts * // Basic matrix cloning * const matrix = new Matrix() * .translate(100, 100) * .rotate(Math.PI / 4); * const copy = matrix.clone(); * * // Clone and modify * const modified = matrix.clone() * .scale(2, 2); * * // Compare matrices * console.log(matrix.equals(copy)); // true * console.log(matrix.equals(modified)); // false * ``` * @returns A copy of this matrix. Good for chaining method calls. * @see {@link Matrix.copyTo} For copying to existing matrix * @see {@link Matrix.copyFrom} For copying from another matrix */ copyTo(matrix: Matrix): Matrix; /** * Changes the values of the matrix to be the same as the ones in given matrix. * @example * ```ts * // Basic matrix copying * const source = new Matrix() * .translate(100, 100) * .rotate(Math.PI / 4); * const target = new Matrix(); * target.copyFrom(source); * ``` * @param matrix - The matrix to copy from * @returns This matrix. Good for chaining method calls. * @see {@link Matrix.clone} For creating new matrix copy * @see {@link Matrix.copyTo} For copying to another matrix */ copyFrom(matrix: Matrix): this; /** * Checks if this matrix equals another matrix. * Compares all components for exact equality. * @example * ```ts * // Basic equality check * const m1 = new Matrix(); * const m2 = new Matrix(); * console.log(m1.equals(m2)); // true * * // Compare transformed matrices * const transform = new Matrix() * .translate(100, 100) * const clone = new Matrix() * .scale(2, 2); * console.log(transform.equals(clone)); // false * ``` * @param matrix - The matrix to compare to * @returns True if matrices are identical * @see {@link Matrix.copyFrom} For copying matrix values * @see {@link Matrix.isIdentity} For identity comparison */ equals(matrix: Matrix): boolean; toString(): string; /** * A default (identity) matrix with no transformations applied. * * > [!IMPORTANT] This is a shared read-only object. Create a new Matrix if you need to modify it. 
* @example * ```ts * // Get identity matrix reference * const identity = Matrix.IDENTITY; * console.log(identity.isIdentity()); // true * * // Compare with identity * const matrix = new Matrix(); * console.log(matrix.equals(Matrix.IDENTITY)); // true * * // Create new matrix instead of modifying IDENTITY * const transform = new Matrix() * .copyFrom(Matrix.IDENTITY) * .translate(100, 100); * ``` * @readonly * @returns A read-only identity matrix * @see {@link Matrix.shared} For temporary calculations * @see {@link Matrix.identity} For resetting matrices */ static get IDENTITY(): Readonly; /** * A static Matrix that can be used to avoid creating new objects. * Will always ensure the matrix is reset to identity when requested. * * > [!IMPORTANT] This matrix is shared and temporary. Do not store references to it. * @example * ```ts * // Use for temporary calculations * const tempMatrix = Matrix.shared; * tempMatrix.translate(100, 100).rotate(Math.PI / 4); * const point = tempMatrix.apply({ x: 10, y: 20 }); * * // Will be reset to identity on next access * const fresh = Matrix.shared; // Back to identity * ``` * @remarks * - Always returns identity matrix * - Safe to modify temporarily * - Not safe to store references * - Useful for one-off calculations * @readonly * @returns A fresh identity matrix for temporary use * @see {@link Matrix.IDENTITY} For immutable identity matrix * @see {@link Matrix.identity} For resetting matrices */ static get shared(): Matrix; } export interface ObservablePoint extends PixiMixins.ObservablePoint { } /** * Observer used to listen for observable point changes. * Provides callback mechanism for point value updates. 
* @example * ```ts * // Basic observer implementation * const observer: Observer = { * _onUpdate: (point) => { * console.log(`Point updated to (${point.x}, ${point.y})`); * } * }; * * // Create observable point with observer * const point = new ObservablePoint(observer, 100, 100); * * // Observer will be notified on changes * point.x = 200; // Logs: Point updated to (200, 100) * ``` * @remarks * - Used internally by ObservablePoint * - Triggered on x/y changes * - Can track multiple points * - Useful for change detection * @typeParam T - The type of point being observed * @see {@link ObservablePoint} The observable point class * @see {@link PointLike} For point interface * @category maths * @standard */ export interface Observer { /** * Callback to call when the point has updated. * Triggered whenever x or y coordinates change. * @param point - The point that was updated */ _onUpdate: (point?: T) => void; } /** * The ObservablePoint object represents a location in a two-dimensional coordinate system. * Triggers a callback when its position changes. * * The x and y properties represent the position on the horizontal and vertical axes, respectively. * @example * ```ts * // Basic observable point usage * const point = new ObservablePoint( * { _onUpdate: (p) => console.log(`Updated to (${p.x}, ${p.y})`) }, * 100, 100 * ); * * // Update triggers callback * point.x = 200; // Logs: Updated to (200, 100) * point.y = 300; // Logs: Updated to (200, 300) * * // Set both coordinates * point.set(50, 50); // Logs: Updated to (50, 50) * ``` * @see {@link Point} For non-observable version * @see {@link Observer} For observer interface * @see {@link PointLike} For point interface * @category maths * @standard */ export declare class ObservablePoint implements PointLike { /** @ignore */ _x: number; /** @ignore */ _y: number; /** This object used to call the `onUpdate` callback when the point changes. 
*/ private readonly _observer; /** * Creates a new `ObservablePoint` * @param observer - Observer to pass to listen for change events. * @param {number} [x=0] - position of the point on the x axis * @param {number} [y=0] - position of the point on the y axis */ constructor(observer: Observer, x?: number, y?: number); /** * Creates a clone of this point. * @example * ```ts * // Basic cloning * const point = new ObservablePoint(observer, 100, 200); * const copy = point.clone(); * * // Clone with new observer * const newObserver = { * _onUpdate: (p) => console.log(`Clone updated: (${p.x}, ${p.y})`) * }; * const watched = point.clone(newObserver); * * // Verify independence * watched.set(300, 400); // Only triggers new observer * ``` * @param observer - Optional observer to pass to the new observable point * @returns A copy of this observable point * @see {@link ObservablePoint.copyFrom} For copying into existing point * @see {@link Observer} For observer interface details */ clone(observer?: Observer): ObservablePoint; /** * Sets the point to a new x and y position. * * If y is omitted, both x and y will be set to x. * @example * ```ts * // Basic position setting * const point = new ObservablePoint(observer); * point.set(100, 200); * * // Set both x and y to same value * point.set(50); // x=50, y=50 * ``` * @param x - Position on the x axis * @param y - Position on the y axis, defaults to x * @returns The point instance itself * @see {@link ObservablePoint.copyFrom} For copying from another point * @see {@link ObservablePoint.equals} For comparing positions */ set(x?: number, y?: number): this; /** * Copies x and y from the given point into this point. 
* @example * ```ts * // Basic copying * const source = new ObservablePoint(observer, 100, 200); * const target = new ObservablePoint(); * target.copyFrom(source); * * // Copy and chain operations * const point = new ObservablePoint() * .copyFrom(source) * .set(x + 50, y + 50); * * // Copy from any PointData * const data = { x: 10, y: 20 }; * point.copyFrom(data); * ``` * @param p - The point to copy from * @returns The point instance itself * @see {@link ObservablePoint.copyTo} For copying to another point * @see {@link ObservablePoint.clone} For creating new point copy */ copyFrom(p: PointData): this; /** * Copies this point's x and y into the given point. * @example * ```ts * // Basic copying * const source = new ObservablePoint(100, 200); * const target = new ObservablePoint(); * source.copyTo(target); * ``` * @param p - The point to copy to. Can be any type that is or extends `PointLike` * @returns The point (`p`) with values updated * @see {@link ObservablePoint.copyFrom} For copying from another point * @see {@link ObservablePoint.clone} For creating new point copy */ copyTo(p: T): T; /** * Checks if another point is equal to this point. * * Compares x and y values using strict equality. * @example * ```ts * // Basic equality check * const p1 = new ObservablePoint(100, 200); * const p2 = new ObservablePoint(100, 200); * console.log(p1.equals(p2)); // true * * // Compare with PointData * const data = { x: 100, y: 200 }; * console.log(p1.equals(data)); // true * * // Check different points * const p3 = new ObservablePoint(200, 300); * console.log(p1.equals(p3)); // false * ``` * @param p - The point to check * @returns `true` if both `x` and `y` are equal * @see {@link ObservablePoint.copyFrom} For making points equal * @see {@link PointData} For point data interface */ equals(p: PointData): boolean; toString(): string; /** * Position of the observable point on the x axis. * Triggers observer callback when value changes. 
* @example * ```ts * // Basic x position * const point = new ObservablePoint(observer); * point.x = 100; // Triggers observer * * // Use in calculations * const width = rightPoint.x - leftPoint.x; * ``` * @default 0 */ get x(): number; set x(value: number); /** * Position of the observable point on the y axis. * Triggers observer callback when value changes. * @example * ```ts * // Basic y position * const point = new ObservablePoint(observer); * point.y = 200; // Triggers observer * * // Use in calculations * const height = bottomPoint.y - topPoint.y; * ``` * @default 0 */ get y(): number; set y(value: number); } /** * Interface for HTMLImageElement. * @category environment * @advanced */ export interface ImageLike extends EventTarget { /** Whether or not the image has completely loaded. */ readonly complete: boolean; /** The Cross-Origin Resource Sharing (CORS) setting to use when retrieving the image. */ crossOrigin: string | null; /** The URL of the image which is currently presented in the element it represents. */ readonly currentSrc: string; /** The width. */ width: number; /** The height. */ height: number; /** The address or URL of the a media resource that is to be considered. */ src: string; /** Returns a Promise that resolves once the image is decoded. */ decode(): Promise; onload: ((this: GlobalEventHandlers, ev: Event) => any) | null; onerror: ((this: GlobalEventHandlers, ev: Event) => any) | null; } /** * Common interface for CanvasRenderingContext2D, OffscreenCanvasRenderingContext2D, and other custom canvas 2D context. * @category environment * @advanced */ export interface ICanvasRenderingContext2D extends CanvasState, CanvasTransform, CanvasCompositing, CanvasImageSmoothing, CanvasFillStrokeStyles, CanvasShadowStyles, CanvasFilters, CanvasRect, CanvasDrawPath, CanvasText, CanvasDrawImage, CanvasImageData, CanvasPathDrawingStyles, Omit, CanvasPath { /** creates a pattern using the specified image and repetition. 
*/ createPattern(image: CanvasImageSource | ICanvas | ImageLike, repetition: string | null): CanvasPattern | null; /** provides different ways to draw an image onto the canvas */ drawImage(image: CanvasImageSource | ICanvas | ImageLike, dx: number, dy: number): void; drawImage(image: CanvasImageSource | ICanvas | ImageLike, dx: number, dy: number, dw: number, dh: number): void; drawImage(image: CanvasImageSource | ICanvas | ImageLike, sx: number, sy: number, sw: number, sh: number, dx: number, dy: number, dw: number, dh: number): void; /** sets the horizontal spacing behavior between text characters. */ letterSpacing?: string; /** sets the horizontal spacing behavior between text characters. */ textLetterSpacing?: string; } /** * The context identifiers for the canvas. * These identifiers are used to specify the type of rendering context to create. * @category environment * @advanced */ export type ContextIds = "2d" | "bitmaprenderer" | "webgl" | "experimental-webgl" | "webgl2" | "experimental-webgl2" | "webgpu"; /** * The predefined color spaces for the canvas. * @category environment * @advanced */ type PredefinedColorSpace$1 = "srgb" | "display-p3"; /** * The rendering context for the canvas. * @category environment * @advanced */ type RenderingContext$1 = ICanvasRenderingContext2D | ImageBitmapRenderingContext | WebGLRenderingContext | WebGL2RenderingContext; /** * The context 2D settings for creating a rendering context. * @category environment * @advanced */ export interface ICanvasRenderingContext2DSettings { alpha?: boolean; colorSpace?: PredefinedColorSpace$1; desynchronized?: boolean; willReadFrequently?: boolean; } /** * The context settings for creating a rendering context. * @category environment * @advanced */ export type ContextSettings = ICanvasRenderingContext2DSettings | ImageBitmapRenderingContextSettings | WebGLContextAttributes; /** * The parent node of the canvas. 
* @category environment * @advanced */ export interface ICanvasParentNode { /** Adds a node to the end of the list of children of the parent node. */ appendChild(element: HTMLElement): void; /** Removes a child node from the parent node. */ removeChild(element: HTMLElement): void; removeChild(element: ICanvas): void; } /** * Represents the style properties of a canvas element. * @category environment * @advanced */ export interface ICanvasStyle { width?: string; height?: string; cursor?: string; touchAction?: string; msTouchAction?: string; msContentZooming?: string; } /** * Represents a rectangle in the canvas. * @category environment * @advanced */ export interface ICanvasRect { /** The x-coordinate of the rectangle's top-left corner. */ x: number; /** The y-coordinate of the rectangle's top-left corner. */ y: number; /** The width of the rectangle. */ width: number; /** The height of the rectangle. */ height: number; } /** * WebGL context events. * @category environment * @advanced */ export interface WebGLContextEventMap { "webglcontextlost": WebGLContextEvent; "webglcontextrestore": WebGLContextEvent; } /** * Common interface for HTMLCanvasElement, OffscreenCanvas, and other custom canvas classes. * @extends PixiMixins.ICanvas * @extends Partial * @category environment * @advanced */ export interface ICanvas extends PixiMixins.ICanvas, Partial { /** Width of the canvas. */ width: number; /** Height of the canvas. */ height: number; /** * Get rendering context of the canvas. * @param {ContextIds} contextId - The identifier of the type of context to create. * @param {ContextSettings} options - The options for creating context. * @returns {RenderingContext | null} The created context, or null if contextId is not supported. 
*/
    getContext(contextId: "2d", options?: ICanvasRenderingContext2DSettings): ICanvasRenderingContext2D | null;
    getContext(contextId: "bitmaprenderer", options?: ImageBitmapRenderingContextSettings): ImageBitmapRenderingContext | null;
    getContext(contextId: "webgl" | "experimental-webgl", options?: WebGLContextAttributes): WebGLRenderingContext | null;
    getContext(contextId: "webgl2" | "experimental-webgl2", options?: WebGLContextAttributes): WebGL2RenderingContext | null;
    getContext(contextId: "webgpu"): GPUCanvasContext | null;
    getContext(contextId: ContextIds, options?: ContextSettings): RenderingContext$1 | null;
    /**
     * Get the content of the canvas as data URL.
     * @param {string} [type] - A string indicating the image format. The default type is `image/png`;
     * that type is also used if the given type isn't supported.
     * @param {number} [quality] - A number between 0 and 1 indicating the image quality to be used when
     * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`).
     * A user agent will use its default quality value if this option is not specified, or if the number
     * is outside the allowed range.
     * @returns {string} A string containing the requested data URL.
     */
    toDataURL?(type?: string, quality?: number): string;
    /**
     * Creates a Blob from the content of the canvas.
     * @param {(blob: Blob | null) => void} callback - A callback function with the resulting `Blob` object
     * as a single argument. `null` may be passed if the image cannot be created for any reason.
     * @param {string} [type] - A string indicating the image format. The default type is `image/png`;
     * that type is also used if the given type isn't supported.
     * @param {number} [quality] - A number between 0 and 1 indicating the image quality to be used when
     * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`).
* A user agent will use its default quality value if this option is not specified, or if the number * is outside the allowed range. * @returns {void} */ toBlob?(callback: (blob: Blob | null) => void, type?: string, quality?: number): void; /** * Get the content of the canvas as Blob. * @param {object} [options] - The options for creating Blob. * @param {string} [options.type] - A string indicating the image format. The default type is `image/png`; * that type is also used if the given type isn't supported. * @param {string} [options.quality] - A number between 0 and 1 indicating the image quality to be used when * creating images using file formats that support lossy compression (such as `image/jpeg` or `image/webp`). * A user agent will use its default quality value if this option is not specified, or if the number * is outside the allowed range. * @returns {Promise} A `Promise` returning a Blob object representing the image contained in the canvas. */ convertToBlob?(options?: { type?: string; quality?: number; }): Promise; /** * Adds the listener for the specified event. * @param {string} type - The type of event to listen for. * @param {EventListenerOrEventListenerObject} listener - The callback to invoke when the event is fired. * @param {boolean | AddEventListenerOptions} options - The options for adding event listener. * @returns {void} */ addEventListener?: { (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void; (type: K, listener: (this: ICanvas, ev: WebGLContextEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void; }; /** * Removes the listener for the specified event. * @param {string} type - The type of event to listen for. * @param {EventListenerOrEventListenerObject} listener - The callback to invoke when the event is fired. * @param {boolean | EventListenerOptions} options - The options for removing event listener. 
* @returns {void} */ removeEventListener?: { (type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void; (type: K, listener: (this: ICanvas, ev: WebGLContextEventMap[K]) => any, options?: boolean | EventListenerOptions): void; }; /** * Dispatches a event. * @param {Event} event - The Event object to dispatch. Its Event.target property will be set to the current EventTarget. * @returns {boolean} Returns false if event is cancelable, and at least one of the event handlers which received event * called Event.preventDefault(). Otherwise true. */ dispatchEvent(event: Event): boolean; /** Parent node of the canvas. */ readonly parentNode?: ICanvasParentNode | null; /** Style of the canvas. */ readonly style?: ICanvasStyle; /** * Get the position and the size of the canvas. * @returns The smallest rectangle which contains the entire canvas. */ getBoundingClientRect?(): ICanvasRect; } declare function earcut(vertices: ArrayLike, holes?: ArrayLike, dimensions?: number): number[]; /** * A polygon triangulation library * @see {@link https://github.com/mapbox/earcut} * @param {number[]} vertices - A flat array of vertex coordinates * @param {number[]} [holes] - An array of hole indices * @param {number} [dimensions=2] - The number of coordinates per vertex in the input array * @returns {number[]} Triangulated polygon * @category utils * @advanced */ declare const earcut$1: typeof earcut; /** * SystemRunner is used internally by the renderers as an efficient way for systems to * be notified about what the renderer is up to during the rendering phase. 
*
 * ```ts
 * import { SystemRunner } from 'pixi.js';
 *
 * const myObject = {
 *     loaded: new SystemRunner('loaded')
 * }
 *
 * const listener = {
 *     loaded: function(){
 *         // handle the loaded event
 *     }
 * }
 *
 * myObject.loaded.add(listener);
 *
 * myObject.loaded.emit();
 * ```
 *
 * Or for handling calling the same function on many items
 * ```ts
 * import { SystemRunner } from 'pixi.js';
 *
 * const myGame = {
 *     update: new SystemRunner('update')
 * }
 *
 * const gameObject = {
 *     update: function(time){
 *         // update my game state
 *     }
 * }
 *
 * myGame.update.add(gameObject);
 *
 * myGame.update.emit(time);
 * ```
 * @category rendering
 * @internal
 */
export declare class SystemRunner {
    // Listeners currently attached to this runner.
    items: any[];
    // Name of the function invoked on each listener when `emit` is called.
    private _name;
    /**
     * @param name - The function name that will be executed on the listeners added to this Runner.
     */
    constructor(name: string);
    /**
     * Dispatch/Broadcast Runner to all listeners added to the queue.
     * @param {...any} params - (optional) parameters to pass to each listener
     */
    emit(a0?: unknown, a1?: unknown, a2?: unknown, a3?: unknown, a4?: unknown, a5?: unknown, a6?: unknown, a7?: unknown): this;
    /**
     * Add a listener to the Runner
     *
     * Runners do not need to have scope or functions passed to them.
     * All that is required is to pass the listening object and ensure that it contains a function that has the same name
     * as the name provided to the Runner when it was created.
     *
     * E.g. a listener passed to this Runner will require a 'complete' function.
     *
     * ```ts
     * import { SystemRunner } from 'pixi.js';
     *
     * const complete = new SystemRunner('complete');
     * ```
     *
     * The scope used will be the object itself.
     * @param {any} item - The object that will be listening.
     */
    add(item: unknown): this;
    /**
     * Remove a single listener from the dispatch queue.
     * @param {any} item - The listener that you would like to remove.
     */
    remove(item: unknown): this;
    /**
     * Check to see if the listener is already in the Runner
     * @param {any} item - The listener that you would like to check.
*/
    contains(item: unknown): boolean;
    /** Remove all listeners from the Runner */
    removeAll(): this;
    /** Remove all references, don't use after this. */
    destroy(): void;
    /**
     * `true` if this Runner contains no listeners
     * @readonly
     */
    get empty(): boolean;
    /**
     * The name of the runner.
     * @readonly
     */
    get name(): string;
}
/**
 * A simple axis-aligned bounding box (AABB) data structure used to define rectangular boundaries.
 * Provides a clearer alternative to array-based bounds representation [minX, minY, maxX, maxY].
 * @example
 * ```ts
 * // Create bounds data
 * const bounds: BoundsData = {
 *     minX: 0,
 *     minY: 0,
 *     maxX: 100,
 *     maxY: 100
 * };
 *
 * // Calculate dimensions
 * const width = bounds.maxX - bounds.minX;
 * const height = bounds.maxY - bounds.minY;
 *
 * // Check if point is inside
 * const isInside = (x: number, y: number) =>
 *     x >= bounds.minX && x <= bounds.maxX &&
 *     y >= bounds.minY && y <= bounds.maxY;
 * ```
 * @see {@link Bounds} For full bounds implementation
 * @see {@link Container#getBounds} For getting bounds
 * @category rendering
 * @standard
 */
export interface BoundsData {
    /** The minimum X coordinate of the bounds */
    minX: number;
    /** The minimum Y coordinate of the bounds */
    minY: number;
    /** The maximum X coordinate of the bounds */
    maxX: number;
    /** The maximum Y coordinate of the bounds */
    maxY: number;
}
/**
 * A representation of an axis-aligned bounding box (AABB) used for efficient collision detection and culling.
 * Stores minimum and maximum coordinates to define a rectangular boundary.
* @example * ```ts * // Create bounds * const bounds = new Bounds(); * * // Add a rectangular frame * bounds.addFrame(0, 0, 100, 100); * console.log(bounds.width, bounds.height); // 100, 100 * * // Transform bounds * const matrix = new Matrix() * .translate(50, 50) * .rotate(Math.PI / 4); * bounds.applyMatrix(matrix); * * // Check point intersection * if (bounds.containsPoint(75, 75)) { * console.log('Point is inside bounds!'); * } * ``` * @category rendering * @standard */ export declare class Bounds { /** * The minimum X coordinate of the bounds. * Represents the leftmost edge of the bounding box. * @example * ```ts * const bounds = new Bounds(); * // Set left edge * bounds.minX = 100; * ``` * @default Infinity */ minX: number; /** * The minimum Y coordinate of the bounds. * Represents the topmost edge of the bounding box. * @example * ```ts * const bounds = new Bounds(); * // Set top edge * bounds.minY = 100; * ``` * @default Infinity */ minY: number; /** * The maximum X coordinate of the bounds. * Represents the rightmost edge of the bounding box. * @example * ```ts * const bounds = new Bounds(); * // Set right edge * bounds.maxX = 200; * // Get width * const width = bounds.maxX - bounds.minX; * ``` * @default -Infinity */ maxX: number; /** * The maximum Y coordinate of the bounds. * Represents the bottommost edge of the bounding box. * @example * ```ts * const bounds = new Bounds(); * // Set bottom edge * bounds.maxY = 200; * // Get height * const height = bounds.maxY - bounds.minY; * ``` * @default -Infinity */ maxY: number; /** * The transformation matrix applied to this bounds object. * Used when calculating bounds with transforms. 
* @example * ```ts * const bounds = new Bounds(); * * // Apply translation matrix * bounds.matrix = new Matrix() * .translate(100, 100); * * // Combine transformations * bounds.matrix = new Matrix() * .translate(50, 50) * .rotate(Math.PI / 4) * .scale(2, 2); * * // Use in bounds calculations * bounds.addFrame(0, 0, 100, 100); // Uses current matrix * bounds.addFrame(0, 0, 100, 100, customMatrix); // Override matrix * ``` * @advanced */ matrix: Matrix; private _rectangle; /** * Creates a new Bounds object. * @param minX - The minimum X coordinate of the bounds. * @param minY - The minimum Y coordinate of the bounds. * @param maxX - The maximum X coordinate of the bounds. * @param maxY - The maximum Y coordinate of the bounds. */ constructor(minX?: number, minY?: number, maxX?: number, maxY?: number); /** * Checks if bounds are empty, meaning either width or height is zero or negative. * Empty bounds occur when min values exceed max values on either axis. * @example * ```ts * const bounds = new Bounds(); * * // Check if newly created bounds are empty * console.log(bounds.isEmpty()); // true, default bounds are empty * * // Add frame and check again * bounds.addFrame(0, 0, 100, 100); * console.log(bounds.isEmpty()); // false, bounds now have area * * // Clear bounds * bounds.clear(); * console.log(bounds.isEmpty()); // true, bounds are empty again * ``` * @returns True if bounds are empty (have no area) * @see {@link Bounds#clear} For resetting bounds * @see {@link Bounds#isValid} For checking validity */ isEmpty(): boolean; /** * The bounding rectangle representation of these bounds. * Lazily creates and updates a Rectangle instance based on the current bounds. 
* @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * * // Get rectangle representation * const rect = bounds.rectangle; * console.log(rect.x, rect.y, rect.width, rect.height); * * // Use for hit testing * if (bounds.rectangle.contains(mouseX, mouseY)) { * console.log('Mouse is inside bounds!'); * } * ``` * @see {@link Rectangle} For rectangle methods * @see {@link Bounds.isEmpty} For bounds validation */ get rectangle(): Rectangle; /** * Clears the bounds and resets all coordinates to their default values. * Resets the transformation matrix back to identity. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * console.log(bounds.isEmpty()); // false * // Clear the bounds * bounds.clear(); * console.log(bounds.isEmpty()); // true * ``` * @returns This bounds object for chaining */ clear(): this; /** * Sets the bounds directly using coordinate values. * Provides a way to set all bounds values at once. * @example * ```ts * const bounds = new Bounds(); * bounds.set(0, 0, 100, 100); * ``` * @param x0 - Left X coordinate of frame * @param y0 - Top Y coordinate of frame * @param x1 - Right X coordinate of frame * @param y1 - Bottom Y coordinate of frame * @see {@link Bounds#addFrame} For matrix-aware bounds setting * @see {@link Bounds#clear} For resetting bounds */ set(x0: number, y0: number, x1: number, y1: number): void; /** * Adds a rectangular frame to the bounds, optionally transformed by a matrix. * Updates the bounds to encompass the new frame coordinates. 
* @example * ```ts * const bounds = new Bounds(); * bounds.addFrame(0, 0, 100, 100); * * // Add transformed frame * const matrix = new Matrix() * .translate(50, 50) * .rotate(Math.PI / 4); * bounds.addFrame(0, 0, 100, 100, matrix); * ``` * @param x0 - Left X coordinate of frame * @param y0 - Top Y coordinate of frame * @param x1 - Right X coordinate of frame * @param y1 - Bottom Y coordinate of frame * @param matrix - Optional transformation matrix * @see {@link Bounds#addRect} For adding Rectangle objects * @see {@link Bounds#addBounds} For adding other Bounds */ addFrame(x0: number, y0: number, x1: number, y1: number, matrix?: Matrix): void; /** * Adds a rectangle to the bounds, optionally transformed by a matrix. * Updates the bounds to encompass the given rectangle. * @example * ```ts * const bounds = new Bounds(); * // Add simple rectangle * const rect = new Rectangle(0, 0, 100, 100); * bounds.addRect(rect); * * // Add transformed rectangle * const matrix = new Matrix() * .translate(50, 50) * .rotate(Math.PI / 4); * bounds.addRect(rect, matrix); * ``` * @param rect - The rectangle to be added * @param matrix - Optional transformation matrix * @see {@link Bounds#addFrame} For adding raw coordinates * @see {@link Bounds#addBounds} For adding other bounds */ addRect(rect: Rectangle, matrix?: Matrix): void; /** * Adds another bounds object to this one, optionally transformed by a matrix. * Expands the bounds to include the given bounds' area. 
* @example * ```ts * const bounds = new Bounds(); * * // Add child bounds * const childBounds = sprite.getBounds(); * bounds.addBounds(childBounds); * * // Add transformed bounds * const matrix = new Matrix() * .scale(2, 2); * bounds.addBounds(childBounds, matrix); * ``` * @param bounds - The bounds to be added * @param matrix - Optional transformation matrix * @see {@link Bounds#addFrame} For adding raw coordinates * @see {@link Bounds#addRect} For adding rectangles */ addBounds(bounds: BoundsData, matrix?: Matrix): void; /** * Adds other Bounds as a mask, creating an intersection of the two bounds. * Only keeps the overlapping region between current bounds and mask bounds. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Create mask bounds * const mask = new Bounds(); * mask.addFrame(50, 50, 150, 150); * // Apply mask - results in bounds of (50,50,100,100) * bounds.addBoundsMask(mask); * ``` * @param mask - The Bounds to use as a mask * @see {@link Bounds#addBounds} For union operation * @see {@link Bounds#fit} For fitting to rectangle */ addBoundsMask(mask: Bounds): void; /** * Applies a transformation matrix to the bounds, updating its coordinates. * Transforms all corners of the bounds using the given matrix. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Apply translation * const translateMatrix = new Matrix() * .translate(50, 50); * bounds.applyMatrix(translateMatrix); * ``` * @param matrix - The matrix to apply to the bounds * @see {@link Matrix} For matrix operations * @see {@link Bounds#addFrame} For adding transformed frames */ applyMatrix(matrix: Matrix): void; /** * Resizes the bounds object to fit within the given rectangle. * Clips the bounds if they extend beyond the rectangle's edges. 
* @example * ```ts * const bounds = new Bounds(0, 0, 200, 200); * // Fit within viewport * const viewport = new Rectangle(50, 50, 100, 100); * bounds.fit(viewport); * // bounds are now (50, 50, 150, 150) * ``` * @param rect - The rectangle to fit within * @returns This bounds object for chaining * @see {@link Bounds#addBoundsMask} For intersection * @see {@link Bounds#pad} For expanding bounds */ fit(rect: Rectangle): this; /** * Resizes the bounds object to include the given bounds. * Similar to fit() but works with raw coordinate values instead of a Rectangle. * @example * ```ts * const bounds = new Bounds(0, 0, 200, 200); * // Fit to specific coordinates * bounds.fitBounds(50, 150, 50, 150); * // bounds are now (50, 50, 150, 150) * ``` * @param left - The left value of the bounds * @param right - The right value of the bounds * @param top - The top value of the bounds * @param bottom - The bottom value of the bounds * @returns This bounds object for chaining * @see {@link Bounds#fit} For fitting to Rectangle * @see {@link Bounds#addBoundsMask} For intersection */ fitBounds(left: number, right: number, top: number, bottom: number): this; /** * Pads bounds object, making it grow in all directions. * If paddingY is omitted, both paddingX and paddingY will be set to paddingX. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * * // Add equal padding * bounds.pad(10); * // bounds are now (-10, -10, 110, 110) * * // Add different padding for x and y * bounds.pad(20, 10); * // bounds are now (-30, -20, 130, 120) * ``` * @param paddingX - The horizontal padding amount * @param paddingY - The vertical padding amount * @returns This bounds object for chaining * @see {@link Bounds#fit} For constraining bounds * @see {@link Bounds#scale} For uniform scaling */ pad(paddingX: number, paddingY?: number): this; /** * Ceils the bounds by rounding up max values and rounding down min values. * Useful for pixel-perfect calculations and avoiding fractional pixels. 
* @example * ```ts * const bounds = new Bounds(); * bounds.set(10.2, 10.9, 50.1, 50.8); * * // Round to whole pixels * bounds.ceil(); * // bounds are now (10, 10, 51, 51) * ``` * @returns This bounds object for chaining * @see {@link Bounds#scale} For size adjustments * @see {@link Bounds#fit} For constraining bounds */ ceil(): this; /** * Creates a new Bounds instance with the same values. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * * // Create a copy * const copy = bounds.clone(); * * // Original and copy are independent * bounds.pad(10); * console.log(copy.width === bounds.width); // false * ``` * @returns A new Bounds instance with the same values * @see {@link Bounds#copyFrom} For reusing existing bounds */ clone(): Bounds; /** * Scales the bounds by the given values, adjusting all edges proportionally. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * * // Scale uniformly * bounds.scale(2); * // bounds are now (0, 0, 200, 200) * * // Scale non-uniformly * bounds.scale(0.5, 2); * // bounds are now (0, 0, 100, 400) * ``` * @param x - The X value to scale by * @param y - The Y value to scale by (defaults to x) * @returns This bounds object for chaining * @see {@link Bounds#pad} For adding padding * @see {@link Bounds#fit} For constraining size */ scale(x: number, y?: number): this; /** * The x position of the bounds in local space. * Setting this value will move the bounds while maintaining its width. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Get x position * console.log(bounds.x); // 0 * * // Move bounds horizontally * bounds.x = 50; * console.log(bounds.minX, bounds.maxX); // 50, 150 * * // Width stays the same * console.log(bounds.width); // Still 100 * ``` */ get x(): number; set x(value: number); /** * The y position of the bounds in local space. * Setting this value will move the bounds while maintaining its height. 
* @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Get y position * console.log(bounds.y); // 0 * * // Move bounds vertically * bounds.y = 50; * console.log(bounds.minY, bounds.maxY); // 50, 150 * * // Height stays the same * console.log(bounds.height); // Still 100 * ``` */ get y(): number; set y(value: number); /** * The width value of the bounds. * Represents the distance between minX and maxX coordinates. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Get width * console.log(bounds.width); // 100 * // Resize width * bounds.width = 200; * console.log(bounds.maxX - bounds.minX); // 200 * ``` */ get width(): number; set width(value: number); /** * The height value of the bounds. * Represents the distance between minY and maxY coordinates. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Get height * console.log(bounds.height); // 100 * // Resize height * bounds.height = 150; * console.log(bounds.maxY - bounds.minY); // 150 * ``` */ get height(): number; set height(value: number); /** * The left edge coordinate of the bounds. * Alias for minX. * @example * ```ts * const bounds = new Bounds(50, 0, 150, 100); * console.log(bounds.left); // 50 * console.log(bounds.left === bounds.minX); // true * ``` * @readonly */ get left(): number; /** * The right edge coordinate of the bounds. * Alias for maxX. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * console.log(bounds.right); // 100 * console.log(bounds.right === bounds.maxX); // true * ``` * @readonly */ get right(): number; /** * The top edge coordinate of the bounds. * Alias for minY. * @example * ```ts * const bounds = new Bounds(0, 25, 100, 125); * console.log(bounds.top); // 25 * console.log(bounds.top === bounds.minY); // true * ``` * @readonly */ get top(): number; /** * The bottom edge coordinate of the bounds. * Alias for maxY. 
* @example * ```ts * const bounds = new Bounds(0, 0, 100, 200); * console.log(bounds.bottom); // 200 * console.log(bounds.bottom === bounds.maxY); // true * ``` * @readonly */ get bottom(): number; /** * Whether the bounds has positive width and height. * Checks if both dimensions are greater than zero. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Check if bounds are positive * console.log(bounds.isPositive); // true * * // Negative bounds * bounds.maxX = bounds.minX; * console.log(bounds.isPositive); // false, width is 0 * ``` * @readonly * @see {@link Bounds#isEmpty} For checking empty state * @see {@link Bounds#isValid} For checking validity */ get isPositive(): boolean; /** * Whether the bounds has valid coordinates. * Checks if the bounds has been initialized with real values. * @example * ```ts * const bounds = new Bounds(); * console.log(bounds.isValid); // false, default state * * // Set valid bounds * bounds.addFrame(0, 0, 100, 100); * console.log(bounds.isValid); // true * ``` * @readonly * @see {@link Bounds#isEmpty} For checking empty state * @see {@link Bounds#isPositive} For checking dimensions */ get isValid(): boolean; /** * Adds vertices from a Float32Array to the bounds, optionally transformed by a matrix. * Used for efficiently updating bounds from raw vertex data. 
* @example * ```ts * const bounds = new Bounds(); * * // Add vertices from geometry * const vertices = new Float32Array([ * 0, 0, // Vertex 1 * 100, 0, // Vertex 2 * 100, 100 // Vertex 3 * ]); * bounds.addVertexData(vertices, 0, 6); * * // Add transformed vertices * const matrix = new Matrix() * .translate(50, 50) * .rotate(Math.PI / 4); * bounds.addVertexData(vertices, 0, 6, matrix); * * // Add subset of vertices * bounds.addVertexData(vertices, 2, 4); // Only second vertex * ``` * @param vertexData - The array of vertices to add * @param beginOffset - Starting index in the vertex array * @param endOffset - Ending index in the vertex array (excluded) * @param matrix - Optional transformation matrix * @see {@link Bounds#addFrame} For adding rectangular frames * @see {@link Matrix} For transformation details */ addVertexData(vertexData: Float32Array, beginOffset: number, endOffset: number, matrix?: Matrix): void; /** * Checks if a point is contained within the bounds. * Returns true if the point's coordinates fall within the bounds' area. * @example * ```ts * const bounds = new Bounds(0, 0, 100, 100); * // Basic point check * console.log(bounds.containsPoint(50, 50)); // true * console.log(bounds.containsPoint(150, 150)); // false * * // Check edges * console.log(bounds.containsPoint(0, 0)); // true, includes edges * console.log(bounds.containsPoint(100, 100)); // true, includes edges * ``` * @param x - x coordinate to check * @param y - y coordinate to check * @returns True if the point is inside the bounds * @see {@link Bounds#isPositive} For valid bounds check * @see {@link Bounds#rectangle} For Rectangle representation */ containsPoint(x: number, y: number): boolean; /** * Returns a string representation of the bounds. * Useful for debugging and logging bounds information. 
* @example
     * ```ts
     * const bounds = new Bounds(0, 0, 100, 100);
     * console.log(bounds.toString()); // "[pixi.js:Bounds minX=0 minY=0 maxX=100 maxY=100 width=100 height=100]"
     * ```
     * @returns A string describing the bounds
     * @see {@link Bounds#copyFrom} For copying bounds
     * @see {@link Bounds#clone} For creating a new instance
     */
    toString(): string;
    /**
     * Copies the bounds from another bounds object.
     * Useful for reusing bounds objects and avoiding allocations.
     * @example
     * ```ts
     * const sourceBounds = new Bounds(0, 0, 100, 100);
     * // Copy bounds
     * const targetBounds = new Bounds();
     * targetBounds.copyFrom(sourceBounds);
     * ```
     * @param bounds - The bounds to copy from
     * @returns This bounds object for chaining
     * @see {@link Bounds#clone} For creating new instances
     */
    copyFrom(bounds: Bounds): this;
}
/**
 * Two Pi.
 * @type {number}
 * @category maths
 * @standard
 */
export declare const PI_2: number;
/**
 * Conversion factor for converting radians to degrees.
 * @type {number}
 * @category maths
 * @standard
 */
export declare const RAD_TO_DEG: number;
/**
 * Conversion factor for converting degrees to radians.
 * @type {number}
 * @category maths
 * @standard
 */
export declare const DEG_TO_RAD: number;
/**
 * Constants that identify shapes, mainly to prevent `instanceof` calls.
 * @category maths
 * @advanced
 */
export type SHAPE_PRIMITIVE = "polygon" | "rectangle" | "circle" | "ellipse" | "triangle" | "roundedRectangle";
/**
 * A basic interface that defines common properties and methods for all Pixi shape primitives.
 * Provides a standard API for shape manipulation, hit testing, and bounds calculation.
* @example
 * ```ts
 * // Implement basic shape
 * class CustomShape implements ShapePrimitive {
 *     public readonly type = 'custom';
 *     public x = 0;
 *     public y = 0;
 *
 *     // Implement required methods
 *     public contains(x: number, y: number): boolean {
 *         // Custom hit testing logic
 *         return true;
 *     }
 *
 *     public getBounds(out?: Rectangle): Rectangle {
 *         // Custom bounds calculation
 *         return out || new Rectangle();
 *     }
 *
 *     // ... implement other required methods
 * }
 * // Use in a container
 * container.hitArea = new CustomShape();
 * ```
 * @see {@link Rectangle} For rectangular shape implementation
 * @see {@link Circle} For circular shape implementation
 * @see {@link Polygon} For polygon shape implementation
 * @category maths
 * @advanced
 */
export interface ShapePrimitive {
    /** The type of the object, mainly used to avoid `instanceof` checks */
    readonly type: SHAPE_PRIMITIVE | (string & {});
    /** Checks whether the x and y coordinates passed to this function are contained within this ShapePrimitive. */
    contains(x: number, y: number): boolean;
    /** Checks whether the x and y coordinates passed to this function are contained within the stroke of this shape */
    strokeContains(x: number, y: number, strokeWidth: number, alignment?: number): boolean;
    /** Creates a clone of this ShapePrimitive instance. */
    clone(): ShapePrimitive;
    /** Copies the properties from another ShapePrimitive to this ShapePrimitive. */
    copyFrom(source: ShapePrimitive): void;
    /** Copies the properties from this ShapePrimitive to another ShapePrimitive. */
    copyTo(destination: ShapePrimitive): void;
    /** Returns the framing rectangle of the ShapePrimitive as a Rectangle object. */
    getBounds(out?: Rectangle): Rectangle;
    /** The X coordinate of the shape */
    readonly x: number;
    /** The Y coordinate of the shape */
    readonly y: number;
}
/** Interface merged (declaration merging) with the `Rectangle` class below to add mixin members. */
export interface Rectangle extends PixiMixins.Rectangle {
}
/**
 * The `Rectangle` object represents a rectangular area defined by its position and dimensions.
 * Used for hit testing, bounds calculation, and general geometric operations.
 * @example
 * ```ts
 * // Basic rectangle creation
 * const rect = new Rectangle(100, 100, 200, 150);
 *
 * // Use as container bounds
 * container.hitArea = new Rectangle(0, 0, 100, 100);
 *
 * // Check point containment
 * const isInside = rect.contains(mouseX, mouseY);
 *
 * // Manipulate dimensions
 * rect.width *= 2;
 * rect.height += 50;
 * ```
 * @remarks
 * - Position defined by top-left corner (x,y)
 * - Dimensions defined by width and height
 * - Supports point and rectangle containment
 * - Common in UI and layout calculations
 * @see {@link Circle} For circular shapes
 * @see {@link Polygon} For complex shapes
 * @see {@link RoundedRectangle} For rounded corners
 * @category maths
 * @standard
 */
export declare class Rectangle implements ShapePrimitive {
    /**
     * The type of the object, mainly used to avoid `instanceof` checks
     * @example
     * ```ts
     * // Check shape type
     * const shape = new Rectangle(0, 0, 100, 100);
     * console.log(shape.type); // 'rectangle'
     *
     * // Use in type guards
     * if (shape.type === 'rectangle') {
     *     console.log(shape.width, shape.height);
     * }
     * ```
     * @readonly
     * @default 'rectangle'
     * @see {@link SHAPE_PRIMITIVE} For all shape types
     */
    readonly type: SHAPE_PRIMITIVE;
    /**
     * The X coordinate of the upper-left corner of the rectangle
     * @example
     * ```ts
     * // Basic x position
     * const rect = new Rectangle();
     * rect.x = 100;
     * ```
     * @default 0
     */
    x: number;
    /**
     * The Y coordinate of the upper-left corner of the rectangle
     * @example
     * ```ts
     * // Basic y position
     * const rect = new Rectangle();
     * rect.y = 100;
     * ```
     * @default 0
     */
    y: number;
    /**
     * The overall width of this rectangle
     * @example
     * ```ts
     * // Basic width setting
     * const rect = new Rectangle();
     * rect.width = 200;
     * ```
     * @default 0
     */
    width: number;
    /**
     * The overall height of this rectangle
     * @example
     * ```ts
     * // Basic height setting
     * const rect = new Rectangle();
     * rect.height = 150;
     * ```
     * @default 0
     */
    height: number;
    /**
     * Creates a new Rectangle. String values are coerced to numbers.
     * @param x - The X coordinate of the upper-left corner of the rectangle
     * @param y - The Y coordinate of the upper-left corner of the rectangle
     * @param width - The overall width of the rectangle
     * @param height - The overall height of the rectangle
     */
    constructor(x?: string | number, y?: string | number, width?: string | number, height?: string | number);
    /**
     * Returns the left edge (x-coordinate) of the rectangle.
     * @example
     * ```ts
     * // Get left edge position
     * const rect = new Rectangle(100, 100, 200, 150);
     * console.log(rect.left); // 100
     *
     * // Use in alignment calculations
     * sprite.x = rect.left + padding;
     *
     * // Compare positions
     * if (point.x > rect.left) {
     *     console.log('Point is right of rectangle');
     * }
     * ```
     * @readonly
     * @returns The x-coordinate of the left edge
     * @see {@link Rectangle.right} For right edge position
     * @see {@link Rectangle.x} For direct x-coordinate access
     */
    get left(): number;
    /**
     * Returns the right edge (x + width) of the rectangle.
     * @example
     * ```ts
     * // Get right edge position
     * const rect = new Rectangle(100, 100, 200, 150);
     * console.log(rect.right); // 300
     *
     * // Align to right edge
     * sprite.x = rect.right - sprite.width;
     *
     * // Check boundaries
     * if (point.x < rect.right) {
     *     console.log('Point is inside right bound');
     * }
     * ```
     * @readonly
     * @returns The x-coordinate of the right edge
     * @see {@link Rectangle.left} For left edge position
     * @see {@link Rectangle.width} For width value
     */
    get right(): number;
    /**
     * Returns the top edge (y-coordinate) of the rectangle.
     * @example
     * ```ts
     * // Get top edge position
     * const rect = new Rectangle(100, 100, 200, 150);
     * console.log(rect.top); // 100
     *
     * // Position above rectangle
     * sprite.y = rect.top - sprite.height;
     *
     * // Check vertical position
     * if (point.y > rect.top) {
     *     console.log('Point is below top edge');
     * }
     * ```
     * @readonly
     * @returns The y-coordinate of the top edge
     * @see {@link Rectangle.bottom} For bottom edge position
     * @see {@link Rectangle.y} For direct y-coordinate access
     */
    get top(): number;
    /**
     * Returns the bottom edge (y + height) of the rectangle.
     * @example
     * ```ts
     * // Get bottom edge position
     * const rect = new Rectangle(100, 100, 200, 150);
     * console.log(rect.bottom); // 250
     *
     * // Stack below rectangle
     * sprite.y = rect.bottom + margin;
     *
     * // Check vertical bounds
     * if (point.y < rect.bottom) {
     *     console.log('Point is above bottom edge');
     * }
     * ```
     * @readonly
     * @returns The y-coordinate of the bottom edge
     * @see {@link Rectangle.top} For top edge position
     * @see {@link Rectangle.height} For height value
     */
    get bottom(): number;
    /**
     * Determines whether the Rectangle is empty (has no area).
     * @example
     * ```ts
     * // Check zero dimensions
     * const rect = new Rectangle(100, 100, 0, 50);
     * console.log(rect.isEmpty()); // true
     * ```
     * @returns True if the rectangle has no area
     * @see {@link Rectangle.width} For width value
     * @see {@link Rectangle.height} For height value
     */
    isEmpty(): boolean;
    /**
     * A constant empty rectangle. This is a new object every time the property is accessed.
     * @example
     * ```ts
     * // Get fresh empty rectangle
     * const empty = Rectangle.EMPTY;
     * console.log(empty.isEmpty()); // true
     * ```
     * @returns A new empty rectangle instance
     * @see {@link Rectangle.isEmpty} For empty state testing
     */
    static get EMPTY(): Rectangle;
    /**
     * Creates a clone of this Rectangle
     * @example
     * ```ts
     * // Basic cloning
     * const original = new Rectangle(100, 100, 200, 150);
     * const copy = original.clone();
     *
     * // Clone and modify
     * const modified = original.clone();
     * modified.width *= 2;
     * modified.height += 50;
     *
     * // Verify independence
     * console.log(original.width); // 200
     * console.log(modified.width); // 400
     * ```
     * @returns A copy of the rectangle
     * @see {@link Rectangle.copyFrom} For copying into existing rectangle
     * @see {@link Rectangle.copyTo} For copying to another rectangle
     */
    clone(): Rectangle;
    /**
     * Converts a Bounds object to a Rectangle object.
     * @example
     * ```ts
     * // Convert bounds to rectangle
     * const bounds = container.getBounds();
     * const rect = new Rectangle().copyFromBounds(bounds);
     * ```
     * @param bounds - The bounds to copy and convert to a rectangle
     * @returns Returns itself
     * @see {@link Bounds} For bounds object structure
     * @see {@link Rectangle.getBounds} For getting rectangle bounds
     */
    copyFromBounds(bounds: Bounds): this;
    /**
     * Copies another rectangle to this one.
     * @example
     * ```ts
     * // Basic copying
     * const source = new Rectangle(100, 100, 200, 150);
     * const target = new Rectangle();
     * target.copyFrom(source);
     *
     * // Chain with other operations
     * const rect = new Rectangle()
     *     .copyFrom(source)
     *     .pad(10);
     * ```
     * @param rectangle - The rectangle to copy from
     * @returns Returns itself
     * @see {@link Rectangle.copyTo} For copying to another rectangle
     * @see {@link Rectangle.clone} For creating new rectangle copy
     */
    copyFrom(rectangle: Rectangle): Rectangle;
    /**
     * Copies this rectangle to another one.
     * @example
     * ```ts
     * // Basic copying
     * const source = new Rectangle(100, 100, 200, 150);
     * const target = new Rectangle();
     * source.copyTo(target);
     *
     * // Chain with other operations
     * const result = source
     *     .copyTo(new Rectangle())
     *     .getBounds();
     * ```
     * @param rectangle - The rectangle to copy to
     * @returns Returns given parameter
     * @see {@link Rectangle.copyFrom} For copying from another rectangle
     * @see {@link Rectangle.clone} For creating new rectangle copy
     */
    copyTo(rectangle: Rectangle): Rectangle;
    /**
     * Checks whether the x and y coordinates given are contained within this Rectangle
     * @example
     * ```ts
     * // Basic containment check
     * const rect = new Rectangle(100, 100, 200, 150);
     * const isInside = rect.contains(150, 125); // true
     *
     * // Check edge cases
     * console.log(rect.contains(100, 100)); // true (on edge)
     * console.log(rect.contains(300, 250)); // false (outside)
     * ```
     * @param x - The X coordinate of the point to test
     * @param y - The Y coordinate of the point to test
     * @returns Whether the x/y coordinates are within this Rectangle
     * @see {@link Rectangle.containsRect} For rectangle containment
     * @see {@link Rectangle.strokeContains} For checking stroke intersection
     */
    contains(x: number, y: number): boolean;
    /**
     * Checks whether the x and y coordinates given are contained within this rectangle including the stroke.
     * @example
     * ```ts
     * // Basic stroke check
     * const rect = new Rectangle(100, 100, 200, 150);
     * const isOnStroke = rect.strokeContains(150, 100, 4); // 4px line width
     *
     * // Check with different alignments
     * const innerStroke = rect.strokeContains(150, 100, 4, 1); // Inside
     * const centerStroke = rect.strokeContains(150, 100, 4, 0.5); // Centered
     * const outerStroke = rect.strokeContains(150, 100, 4, 0); // Outside
     * ```
     * @param x - The X coordinate of the point to test
     * @param y - The Y coordinate of the point to test
     * @param strokeWidth - The width of the line to check
     * @param alignment - The alignment of the stroke (1 = inner, 0.5 = centered, 0 = outer)
     * @returns Whether the x/y coordinates are within this rectangle's stroke
     * @see {@link Rectangle.contains} For checking fill containment
     * @see {@link Rectangle.getBounds} For getting stroke bounds
     */
    strokeContains(x: number, y: number, strokeWidth: number, alignment?: number): boolean;
    /**
     * Determines whether the `other` Rectangle transformed by `transform` intersects with `this` Rectangle object.
     *
     * Returns true only if the area of the intersection is greater than 0.
     * This means that rectangles sharing only a side are not considered intersecting.
     * @example
     * ```ts
     * // Basic intersection check
     * const rect1 = new Rectangle(0, 0, 100, 100);
     * const rect2 = new Rectangle(50, 50, 100, 100);
     * console.log(rect1.intersects(rect2)); // true
     *
     * // With transformation matrix
     * const matrix = new Matrix();
     * matrix.rotate(Math.PI / 4); // 45 degrees
     * console.log(rect1.intersects(rect2, matrix)); // Checks with rotation
     *
     * // Edge cases
     * const zeroWidth = new Rectangle(0, 0, 0, 100);
     * console.log(rect1.intersects(zeroWidth)); // false (no area)
     * ```
     * @remarks
     * - Returns true only if intersection area is > 0
     * - Rectangles sharing only a side are not intersecting
     * - Zero-area rectangles cannot intersect anything
     * - Supports optional transformation matrix
     * @param other - The Rectangle to intersect with `this`
     * @param transform - Optional transformation matrix of `other`
     * @returns True if the transformed `other` Rectangle intersects with `this`
     * @see {@link Rectangle.containsRect} For containment testing
     * @see {@link Rectangle.contains} For point testing
     */
    intersects(other: Rectangle, transform?: Matrix): boolean;
    /**
     * Pads the rectangle making it grow in all directions.
     *
     * If paddingY is omitted, both paddingX and paddingY will be set to paddingX.
     * @example
     * ```ts
     * // Basic padding
     * const rect = new Rectangle(100, 100, 200, 150);
     * rect.pad(10); // Adds 10px padding on all sides
     *
     * // Different horizontal and vertical padding
     * const uiRect = new Rectangle(0, 0, 100, 50);
     * uiRect.pad(20, 10); // 20px horizontal, 10px vertical
     * ```
     * @remarks
     * - Adjusts x/y by subtracting padding
     * - Increases width/height by padding * 2
     * - Common in UI layout calculations
     * - Chainable with other methods
     * @param paddingX - The horizontal padding amount
     * @param paddingY - The vertical padding amount
     * @returns Returns itself
     * @see {@link Rectangle.enlarge} For growing to include another rectangle
     * @see {@link Rectangle.fit} For shrinking to fit within another rectangle
     */
    pad(paddingX?: number, paddingY?: number): this;
    /**
     * Fits this rectangle around the passed one.
     * @example
     * ```ts
     * // Basic fitting
     * const container = new Rectangle(0, 0, 100, 100);
     * const content = new Rectangle(25, 25, 200, 200);
     * content.fit(container); // Clips to container bounds
     * ```
     * @param rectangle - The rectangle to fit around
     * @returns Returns itself
     * @see {@link Rectangle.enlarge} For growing to include another rectangle
     * @see {@link Rectangle.pad} For adding padding around the rectangle
     */
    fit(rectangle: Rectangle): this;
    /**
     * Enlarges rectangle so that its corners lie on a grid defined by resolution.
     * @example
     * ```ts
     * // Basic grid alignment
     * const rect = new Rectangle(10.2, 10.6, 100.8, 100.4);
     * rect.ceil(); // Aligns to whole pixels
     *
     * // Custom resolution grid
     * const uiRect = new Rectangle(5.3, 5.7, 50.2, 50.8);
     * uiRect.ceil(0.5); // Aligns to half pixels
     *
     * // Use with precision value
     * const preciseRect = new Rectangle(20.001, 20.999, 100.001, 100.999);
     * preciseRect.ceil(1, 0.01); // Handles small decimal variations
     * ```
     * @param resolution - The grid size to align to (1 = whole pixels)
     * @param eps - Small number to prevent floating point errors
     * @returns Returns itself
     * @see {@link Rectangle.fit} For constraining to bounds
     * @see {@link Rectangle.enlarge} For growing dimensions
     */
    ceil(resolution?: number, eps?: number): this;
    /**
     * Scales the rectangle's dimensions and position by the specified factors.
     * @example
     * ```ts
     * const rect = new Rectangle(50, 50, 100, 100);
     *
     * // Scale uniformly
     * rect.scale(0.5, 0.5);
     * // rect is now: x=25, y=25, width=50, height=50
     *
     * // non-uniformly
     * rect.scale(0.5, 1);
     * // rect is now: x=25, y=50, width=50, height=100
     * ```
     * @param x - The factor by which to scale the horizontal properties (x, width).
     * @param y - The factor by which to scale the vertical properties (y, height).
     * @returns Returns itself
     */
    scale(x: number, y?: number): this;
    /**
     * Enlarges this rectangle to include the passed rectangle.
     * @example
     * ```ts
     * // Basic enlargement
     * const rect = new Rectangle(50, 50, 100, 100);
     * const other = new Rectangle(0, 0, 200, 75);
     * rect.enlarge(other);
     * // rect is now: x=0, y=0, width=200, height=150
     *
     * // Use for bounding box calculation
     * const bounds = new Rectangle();
     * objects.forEach((obj) => {
     *     bounds.enlarge(obj.getBounds());
     * });
     * ```
     * @param rectangle - The rectangle to include
     * @returns Returns itself
     * @see {@link Rectangle.fit} For shrinking to fit within another rectangle
     * @see {@link Rectangle.pad} For adding padding around the rectangle
     */
    enlarge(rectangle: Rectangle): this;
    /**
     * Returns the framing rectangle of the rectangle as a Rectangle object
     * @example
     * ```ts
     * // Basic bounds retrieval
     * const rect = new Rectangle(100, 100, 200, 150);
     * const bounds = rect.getBounds();
     *
     * // Reuse existing rectangle
     * const out = new Rectangle();
     * rect.getBounds(out);
     * ```
     * @param out - Optional rectangle to store the result
     * @returns The framing rectangle
     * @see {@link Rectangle.copyFrom} For direct copying
     * @see {@link Rectangle.clone} For creating new copy
     */
    getBounds(out?: Rectangle): Rectangle;
    /**
     * Determines whether another Rectangle is fully contained within this Rectangle.
     *
     * Rectangles that occupy the same space are considered to be containing each other.
     *
     * Rectangles without area (width or height equal to zero) can't contain anything,
     * not even other arealess rectangles.
     * @example
     * ```ts
     * // Check if one rectangle contains another
     * const container = new Rectangle(0, 0, 100, 100);
     * const inner = new Rectangle(25, 25, 50, 50);
     *
     * console.log(container.containsRect(inner)); // true
     *
     * // Check overlapping rectangles
     * const partial = new Rectangle(75, 75, 50, 50);
     * console.log(container.containsRect(partial)); // false
     *
     * // Zero-area rectangles
     * const empty = new Rectangle(0, 0, 0, 100);
     * console.log(container.containsRect(empty)); // false
     * ```
     * @param other - The Rectangle to check for containment
     * @returns True if other is fully contained within this Rectangle
     * @see {@link Rectangle.contains} For point containment
     * @see {@link Rectangle.intersects} For overlap testing
     */
    containsRect(other: Rectangle): boolean;
    /**
     * Sets the position and dimensions of the rectangle.
     * @example
     * ```ts
     * // Basic usage
     * const rect = new Rectangle();
     * rect.set(100, 100, 200, 150);
     *
     * // Chain with other operations
     * const bounds = new Rectangle()
     *     .set(0, 0, 100, 100)
     *     .pad(10);
     * ```
     * @param x - The X coordinate of the upper-left corner of the rectangle
     * @param y - The Y coordinate of the upper-left corner of the rectangle
     * @param width - The overall width of the rectangle
     * @param height - The overall height of the rectangle
     * @returns Returns itself for method chaining
     * @see {@link Rectangle.copyFrom} For copying from another rectangle
     * @see {@link Rectangle.clone} For creating a new copy
     */
    set(x: number, y: number, width: number, height: number): this;
    /** Returns a string representation of this Rectangle. */
    toString(): string;
}
/**
 * Base options for destroying display objects.
 * Controls how deep the destruction process should go through the display tree.
* @example * ```ts * // Basic destruction - only this container * container.destroy({ children: false }); * * // Deep destruction - container and all children * container.destroy({ children: true }); * * // Cleanup pattern * function cleanupScene(scene: Container) { * // Remove from parent first * scene.parent?.removeChild(scene); * // Then destroy with all children * scene.destroy({ children: true }); * } * ``` * @see {@link Container#destroy} For destruction method * @see {@link DestroyOptions} For all destroy options * @category scene * @standard */ export interface BaseDestroyOptions { /** * Whether to destroy children recursively. * When true, runs destroy() on all children in the display tree. * @default false * @example * ```js * container.destroy({ children: true }); * ``` */ children?: boolean; } /** * Options when destroying textures through `.destroy()` calls. * Controls how thoroughly textures and their sources are cleaned up. * @example * ```ts * // Basic texture cleanup * sprite.destroy({ * texture: true * }); * * // Complete texture cleanup * sprite.destroy({ * texture: true, * textureSource: true * }); * ``` * @see {@link Container#destroy} For general destruction * @see {@link Texture#destroy} For texture cleanup * @category scene * @standard */ export interface TextureDestroyOptions { /** * Whether to destroy the texture for the display object. * @default false * @example * ```js * texturedObject.destroy({ texture: true }); * ``` */ texture?: boolean; /** * Whether to destroy the underlying texture source. * Use carefully with shared texture sources. * @default false * @example * ```js * texturedObject.destroy({ textureSource: true }); * ``` */ textureSource?: boolean; } /** * Options when destroying a graphics context. * Controls the cleanup of graphics-specific resources. 
* @example * ```ts * // Basic context cleanup * graphics.destroy({ * context: true * }); * * // Full graphics cleanup * graphics.destroy({ * context: true, * texture: true, * textureSource: true * }); * ``` * @see {@link Graphics#destroy} For graphics destruction * @see {@link DestroyOptions} For all destroy options * @category scene * @standard */ export interface ContextDestroyOptions { /** * Whether to destroy the graphics context associated with the graphics object. * @default false * @example * ```js * graphics.destroy({ context: true }); * ``` */ context?: boolean; } /** * Options when destroying a text object. Controls whether associated text styles * should be cleaned up along with the text object itself. * ```ts * // Basic text cleanup * text.destroy({ style: false }); // Keep style for reuse * text.destroy({ style: true }); // Destroy style as well * ``` * @category text * @standard */ export interface TextDestroyOptions { /** * Whether to destroy the text style object along with the text. * Use carefully with shared styles. * @default false */ style?: boolean; } /** * A utility type that allows a type to be either the specified type or a boolean. * This is useful for options that can be either a specific value or a boolean flag. * @category utils * @advanced */ export type TypeOrBool = T | boolean; /** * Options for destroying a container and its resources. * Combines all destroy options into a single configuration object. * @example * ```ts * // Destroy the container and all its children, including textures and styles * container.destroy({ * children: true, * texture: true, * textureSource: true, * context: true, * style: true * }); * ``` * @category scene * @standard */ export type DestroyOptions = TypeOrBool; /** * Constants used by the renderer for clearing the screen or render textures. * @category rendering * @advanced */ export declare enum CLEAR { /** No clear operation. */ NONE = 0, /** Clear the color buffer. 
     */
    COLOR = 16384,
    /** Clear the stencil buffer. */
    STENCIL = 1024,
    /** Clear the depth buffer. */
    DEPTH = 256,
    /** Clear the color and depth buffers. */
    COLOR_DEPTH = 16640,
    /** Clear the color and stencil buffers. */
    COLOR_STENCIL = 17408,
    /** Clear the depth and stencil buffers. */
    DEPTH_STENCIL = 1280,
    /** Clear the color, depth, and stencil buffers. */
    ALL = 17664
}
/**
 * Used for clearing render textures. true is the same as `ALL`, false is the same as `NONE`
 * @category rendering
 * @advanced
 */
export type CLEAR_OR_BOOL = CLEAR | boolean;
/**
 * Collection of valid extension types.
 * @category extensions
 * @advanced
 */
export declare enum ExtensionType {
    /** extensions that are registered as Application plugins */
    Application = "application",
    /** extensions that are registered as WebGL render pipes */
    WebGLPipes = "webgl-pipes",
    /** extensions that are registered as WebGL render pipes adaptors */
    WebGLPipesAdaptor = "webgl-pipes-adaptor",
    /** extensions that are registered as WebGL render systems */
    WebGLSystem = "webgl-system",
    /** extensions that are registered as WebGPU render pipes */
    WebGPUPipes = "webgpu-pipes",
    /** extensions that are registered as WebGPU render pipes adaptors */
    WebGPUPipesAdaptor = "webgpu-pipes-adaptor",
    /** extensions that are registered as WebGPU render systems */
    WebGPUSystem = "webgpu-system",
    /** extensions that are registered as Canvas render systems */
    CanvasSystem = "canvas-system",
    /** extensions that are registered as Canvas render pipes adaptors */
    CanvasPipesAdaptor = "canvas-pipes-adaptor",
    /** extensions that are registered as Canvas render pipes */
    CanvasPipes = "canvas-pipes",
    /** extensions that combine the other Asset extensions */
    Asset = "asset",
    /** extensions that are used to load assets through Assets */
    LoadParser = "load-parser",
    /** extensions that are used to resolve asset urls through Assets */
    ResolveParser = "resolve-parser",
    /** extensions that are used to handle how urls are cached by Assets */
    CacheParser = "cache-parser",
    /** extensions that are used to add/remove available resources from Assets */
    DetectionParser = "detection-parser",
    /** extensions that are registered with the MaskEffectManager */
    MaskEffect = "mask-effect",
    /** A type of extension for creating a new advanced blend mode */
    BlendMode = "blend-mode",
    /** A type of extension that will be used to auto detect a resource type */
    TextureSource = "texture-source",
    /** A type of extension that will be used to auto detect an environment */
    Environment = "environment",
    /** A type of extension for building and triangulating custom shapes used in graphics. */
    ShapeBuilder = "shape-builder",
    /** A type of extension for creating custom batchers used in rendering. */
    Batcher = "batcher"
}
/**
 * The metadata for an extension.
 * @category extensions
 * @ignore
 */
export interface ExtensionMetadataDetails {
    /** The extension type, can be multiple types */
    type: ExtensionType | ExtensionType[];
    /** Optional. Some plugins provide an API name/property, to make them more easily accessible */
    name?: string;
    /** Optional, used for sorting the plugins in a particular order */
    priority?: number;
}
/**
 * The metadata for an extension.
 * @category extensions
 * @advanced
 */
export type ExtensionMetadata = ExtensionType | ExtensionMetadataDetails;
/**
 * Format when registering an extension. Generally, the extension
 * should have these values as `extension` static property,
 * but you can override name or type by providing an object.
 * @category extensions
 * @advanced
 */
interface ExtensionFormat {
    /** The extension type, can be multiple types */
    type: ExtensionType | ExtensionType[];
    /** Optional. Some plugins provide an API name/property, such as Renderer plugins */
    name?: string;
    /** Optional, used for sorting the plugins in a particular order */
    priority?: number;
    /** Reference to the plugin object/class */
    ref: any;
}
/**
 * Extension format that is used internally for registrations.
* @category extensions * @ignore */ interface StrictExtensionFormat extends ExtensionFormat { /** The extension type, always expressed as multiple, even if a single */ type: ExtensionType[]; } /** * The function that is called when an extension is added or removed. * @category extensions * @ignore */ export type ExtensionHandler = (extension: StrictExtensionFormat) => void; /** * Get the priority for an extension. * @ignore * @param ext - Any extension * @param defaultPriority - Fallback priority if none is defined. * @returns The priority for the extension. * @category extensions */ export declare const normalizeExtensionPriority: (ext: ExtensionFormat | any, defaultPriority: number) => number; /** * Global registration system for all PixiJS extensions. Provides a centralized way to add, remove, * and manage functionality across the engine. * * Features: * - Register custom extensions and plugins * - Handle multiple extension types * - Priority-based ordering * @example * ```ts * import { extensions, ExtensionType } from 'pixi.js'; * * // Register a simple object extension * extensions.add({ * extension: { * type: ExtensionType.LoadParser, * name: 'my-loader', * priority: 100, // Optional priority for ordering * }, * // add load parser functions * }); * * // Register a class-based extension * class MyRendererPlugin { * static extension = { * type: [ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem], * name: 'myRendererPlugin' * }; * * // add renderer plugin methods * } * extensions.add(MyRendererPlugin); * * // Remove extensions * extensions.remove(MyRendererPlugin); * ``` * @remarks * - Extensions must have a type from {@link ExtensionType} * - Can be registered before or after their handlers * - Supports priority-based ordering * - Automatically normalizes extension formats * @see {@link ExtensionType} For all available extension types * @see {@link ExtensionFormat} For extension registration format * @see {@link Application} For application plugin system * 
@see {@link LoaderParser} For asset loading extensions * @category extensions * @standard * @class */ export declare const extensions: { /** @ignore */ _addHandlers: Partial>; /** @ignore */ _removeHandlers: Partial>; /** @ignore */ _queue: Partial>; /** * Remove extensions from PixiJS. * @param extensions - Extensions to be removed. Can be: * - Extension class with static `extension` property * - Extension format object with `type` and `ref` * - Multiple extensions as separate arguments * @returns {extensions} this for chaining * @example * ```ts * // Remove a single extension * extensions.remove(MyRendererPlugin); * * // Remove multiple extensions * extensions.remove( * MyRendererPlugin, * MySystemPlugin * ); * ``` * @see {@link ExtensionType} For available extension types * @see {@link ExtensionFormat} For extension format details */ remove(...extensions: Array): any; /** * Register new extensions with PixiJS. Extensions can be registered in multiple formats: * - As a class with a static `extension` property * - As an extension format object * - As multiple extensions passed as separate arguments * @param extensions - Extensions to add to PixiJS. Each can be: * - A class with static `extension` property * - An extension format object with `type` and `ref` * - Multiple extensions as separate arguments * @returns This extensions instance for chaining * @example * ```ts * // Register a simple extension * extensions.add(MyRendererPlugin); * * // Register multiple extensions * extensions.add( * MyRendererPlugin, * MySystemPlugin, * }); * ``` * @see {@link ExtensionType} For available extension types * @see {@link ExtensionFormat} For extension format details * @see {@link extensions.remove} For removing registered extensions */ add(...extensions: Array): any; /** * Internal method to handle extensions by name. * @param type - The extension type. * @param onAdd - Function handler when extensions are added/registered {@link StrictExtensionFormat}. 
* @param onRemove - Function handler when extensions are removed/unregistered {@link StrictExtensionFormat}. * @returns this for chaining. * @internal * @ignore */ handle(type: ExtensionType, onAdd: ExtensionHandler, onRemove: ExtensionHandler): any; /** * Handle a type, but using a map by `name` property. * @param type - Type of extension to handle. * @param map - The object map of named extensions. * @returns this for chaining. * @ignore */ handleByMap(type: ExtensionType, map: Record): any; /** * Handle a type, but using a list of extensions with a `name` property. * @param type - Type of extension to handle. * @param map - The array of named extensions. * @param defaultPriority - Fallback priority if none is defined. * @returns this for chaining. * @ignore */ handleByNamedList(type: ExtensionType, map: { name: string; value: any; }[], defaultPriority?: number): any; /** * Handle a type, but using a list of extensions. * @param type - Type of extension to handle. * @param list - The list of extensions. * @param defaultPriority - The default priority to use if none is specified. * @returns this for chaining. * @ignore */ handleByList(type: ExtensionType, list: any[], defaultPriority?: number): any; /** * Mixin the source object(s) properties into the target class's prototype. * Copies all property descriptors from source objects to the target's prototype. 
* @param Target - The target class to mix properties into * @param sources - One or more source objects containing properties to mix in * @example * ```ts * // Create a mixin with shared properties * const moveable = { * x: 0, * y: 0, * move(x: number, y: number) { * this.x += x; * this.y += y; * } * }; * * // Create a mixin with computed properties * const scalable = { * scale: 1, * get scaled() { * return this.scale > 1; * } * }; * * // Apply mixins to a class * extensions.mixin(Sprite, moveable, scalable); * * // Use mixed-in properties * const sprite = new Sprite(); * sprite.move(10, 20); * console.log(sprite.x, sprite.y); // 10, 20 * ``` * @remarks * - Copies all properties including getters/setters * - Does not modify source objects * - Preserves property descriptors * @see {@link Object.defineProperties} For details on property descriptors * @see {@link Object.getOwnPropertyDescriptors} For details on property copying */ mixin(Target: any, ...sources: Parameters[0][]): void; }; /** * A system is a generic interface for a renderer system. * It is used to define the methods that a system should implement. * @category rendering * @advanced */ export interface System { init?: (options: INIT_OPTIONS) => void; /** Generic destroy methods to be overridden by the subclass */ destroy?: (options?: DESTROY_OPTIONS) => void; } /** * The constructor for a System. * It is used to create instances of systems that can be added to a renderer. * @category rendering * @advanced */ export interface SystemConstructor { new (renderer: Renderer): System; } /** * Options for the background system. * @category rendering * @advanced */ export interface BackgroundSystemOptions { /** * The background color used to clear the canvas. See {@link ColorSource} for accepted color values. 
* @default 'black' */ backgroundColor: ColorSource; /** Alias for `backgroundColor` */ background?: ColorSource; /** * Transparency of the background color, value from `0` (fully transparent) to `1` (fully opaque). * This value determines whether the canvas is initialized with alpha transparency support. * Note: This cannot be changed after initialization. If set to `1`, the canvas will remain opaque, * even if a transparent background color is set later. * @default 1 */ backgroundAlpha?: number; /** * Whether to clear the canvas before new render passes. * @default true */ clearBeforeRender?: boolean; } /** * The background system manages the background color and alpha of the main view. * @category rendering * @advanced */ export declare class BackgroundSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "background"; readonly priority: 0; }; /** default options used by the system */ static defaultOptions: BackgroundSystemOptions; /** * This sets if the CanvasRenderer will clear the canvas or not before the new render pass. * If the scene is NOT transparent PixiJS will use a canvas sized fillRect operation every * frame to set the canvas background color. If the scene is transparent PixiJS will use clearRect * to clear the canvas every frame. Disable this by setting this to false. For example, if * your game has a canvas filling background image you often don't need this set. */ clearBeforeRender: boolean; private readonly _backgroundColor; constructor(); /** * initiates the background system * @param options - the options for the background colors */ init(options: BackgroundSystemOptions): void; /** The background color to fill if not transparent */ get color(): Color; set color(value: ColorSource); /** The background color alpha. Setting this to 0 will make the canvas transparent. 
*/ get alpha(): number; set alpha(value: number); /** The background color as an [R, G, B, A] array. */ get colorRgba(): RgbaArray; /** * destroys the background system * @internal */ destroy(): void; } /** * A bind group is a collection of resources that are bound together for use by a shader. * They are essentially a wrapper for the WebGPU BindGroup class. But with the added bonus * that WebGL can also work with them. * @see https://gpuweb.github.io/gpuweb/#dictdef-gpubindgroupdescriptor * @example * // Create a bind group with a single texture and sampler * const bindGroup = new BindGroup({ * uTexture: texture.source, * uTexture: texture.style, * }); * * Bind groups resources must implement the {@link BindResource} interface. * The following resources are supported: * - {@link TextureSource} * - {@link TextureStyle} * - {@link Buffer} * - {@link BufferResource} * - {@link UniformGroup} * * The keys in the bind group must correspond to the names of the resources in the GPU program. * * This bind group class will also watch for changes in its resources ensuring that the changes * are reflected in the WebGPU BindGroup. * @category rendering * @advanced */ export declare class BindGroup { /** The resources that are bound together for use by a shader. */ resources: Record; /** * a key used internally to match it up to a WebGPU Bindgroup * @internal */ _key: string; private _dirty; /** * Create a new instance eof the Bind Group. * @param resources - The resources that are bound together for use by a shader. */ constructor(resources?: Record); /** * Updates the key if its flagged as dirty. This is used internally to * match this bind group to a WebGPU BindGroup. * @internal */ _updateKey(): void; /** * Set a resource at a given index. this function will * ensure that listeners will be removed from the current resource * and added to the new resource. * @param resource - The resource to set. * @param index - The index to set the resource at. 
*/ setResource(resource: BindResource, index: number): void; /** * Returns the resource at the specified index. * @param index - The index of the resource to get. * @returns - The resource at the specified index. */ getResource(index: number): BindResource; /** * Used internally to 'touch' each resource, to ensure that the GC * knows that all resources in this bind group are still being used. * @param tick - The current tick. * @internal */ _touch(tick: number): void; /** Destroys this bind group and removes all listeners. */ destroy(): void; protected onResourceChange(resource: BindResource): void; } /** * an interface that allows a resource to be bound to the gpu in a bind group * @category rendering * @advanced */ export interface BindResource { /** * The type of resource this is * @ignore */ _resourceType: string; /** * Unique id for this resource; this can change and is used to link the gpu * @ignore */ _resourceId: number; _touched: number; /** * a boolean that indicates if the resource has been destroyed. * If true, the resource should not be used, and any bind groups * referencing it will release their references to this resource. * @ignore */ destroyed: boolean; /** * event dispatched whenever the underlying resource needs to change; * this could be a texture or buffer that has been resized. * This is important as it allows the renderer to know that it needs to rebind the resource */ on?(event: "change", listenerFunction: (resource: BindResource) => void, listener: BindGroup): void; /** Removes a previously registered change listener. */ off?(event: "change", listenerFunction: (resource: BindResource) => void, listener: BindGroup): void; } /** * Specifies the alpha composition mode for textures. * * - `no-premultiply-alpha`: Does not premultiply alpha. * - `premultiply-alpha-on-upload`: Premultiplies alpha on texture upload. * - `premultiplied-alpha`: Assumes the texture is already in premultiplied alpha format. 
* @category rendering * @advanced */ export type ALPHA_MODES = "no-premultiply-alpha" | "premultiply-alpha-on-upload" | "premultiplied-alpha"; /** * The texture formats that are supported by pixi. * * These formats are used to specify the format of textures in WebGPU and WebGL. * They include various uncompressed, compressed, and depth/stencil formats. * @category rendering * @advanced */ export type TEXTURE_FORMATS = "r8unorm" | "r8snorm" | "r8uint" | "r8sint" | "r16uint" | "r16sint" | "r16float" | "rg8unorm" | "rg8snorm" | "rg8uint" | "rg8sint" | "r32uint" | "r32sint" | "r32float" | "rg16uint" | "rg16sint" | "rg16float" | "rgba8unorm" | "rgba8unorm-srgb" | "rgba8snorm" | "rgba8uint" | "rgba8sint" | "bgra8unorm" | "bgra8unorm-srgb" | "rgb9e5ufloat" | "rgb10a2unorm" | "rg11b10ufloat" | "rg32uint" | "rg32sint" | "rg32float" | "rgba16uint" | "rgba16sint" | "rgba16float" | "rgba32uint" | "rgba32sint" | "rgba32float" | "stencil8" | "depth16unorm" | "depth24plus" | "depth24plus-stencil8" | "depth32float" | "depth32float-stencil8" | "bc1-rgba-unorm" | "bc1-rgba-unorm-srgb" | "bc2-rgba-unorm" | "bc2-rgba-unorm-srgb" | "bc3-rgba-unorm" | "bc3-rgba-unorm-srgb" | "bc4-r-unorm" | "bc4-r-snorm" | "bc5-rg-unorm" | "bc5-rg-snorm" | "bc6h-rgb-ufloat" | "bc6h-rgb-float" | "bc7-rgba-unorm" | "bc7-rgba-unorm-srgb" | "etc2-rgb8unorm" | "etc2-rgb8unorm-srgb" | "etc2-rgb8a1unorm" | "etc2-rgb8a1unorm-srgb" | "etc2-rgba8unorm" | "etc2-rgba8unorm-srgb" | "eac-r11unorm" | "eac-r11snorm" | "eac-rg11unorm" | "eac-rg11snorm" | "astc-4x4-unorm" | "astc-4x4-unorm-srgb" | "astc-5x4-unorm" | "astc-5x4-unorm-srgb" | "astc-5x5-unorm" | "astc-5x5-unorm-srgb" | "astc-6x5-unorm" | "astc-6x5-unorm-srgb" | "astc-6x6-unorm" | "astc-6x6-unorm-srgb" | "astc-8x5-unorm" | "astc-8x5-unorm-srgb" | "astc-8x6-unorm" | "astc-8x6-unorm-srgb" | "astc-8x8-unorm" | "astc-8x8-unorm-srgb" | "astc-10x5-unorm" | "astc-10x5-unorm-srgb" | "astc-10x6-unorm" | "astc-10x6-unorm-srgb" | "astc-10x8-unorm" | 
"astc-10x8-unorm-srgb" | "astc-10x10-unorm" | "astc-10x10-unorm-srgb" | "astc-12x10-unorm" | "astc-12x10-unorm-srgb" | "astc-12x12-unorm" | "astc-12x12-unorm-srgb"; /** * The texture dimensions that are supported by pixi. * * - `1d` is a one-dimensional texture, which is typically used for linear data. * - `2d` is a two-dimensional texture, which is commonly used for images and textures. * - `3d` is a three-dimensional texture, which is used for volumetric data or 3D textures. * @category rendering * @advanced */ export type TEXTURE_DIMENSIONS = "1d" | "2d" | "3d"; /** * The wrap modes that are supported by pixi. * * The wrap mode affects the default wrapping mode of future operations. * - `clamp-to-edge` is the default mode, which clamps the texture coordinates to the edge of the texture. * - `repeat` allows the texture to repeat in both u and v directions. * - `mirror-repeat` allows the texture to repeat in both u and v directions, but mirrors the texture on every other repeat. * @category rendering * @standard */ export type WRAP_MODE = "clamp-to-edge" | "repeat" | "mirror-repeat"; /** @internal */ export declare enum DEPRECATED_WRAP_MODES { CLAMP = "clamp-to-edge", REPEAT = "repeat", MIRRORED_REPEAT = "mirror-repeat" } /** * The wrap modes that are supported by pixi. * @deprecated since 8.0.0 * @category rendering * @see WRAP_MODE * @advanced */ export declare const WRAP_MODES: typeof DEPRECATED_WRAP_MODES; /** * The scale modes that are supported by pixi. * * The scale mode affects the default scaling mode of future operations. * It can be re-assigned to either LINEAR or NEAREST, depending upon suitability. * * - `nearest` is a pixelating scaling mode, which does not interpolate pixels. * - `linear` is a smooth scaling mode, which interpolates pixels for smoother results. 
* @category rendering * @standard */ export type SCALE_MODE = "nearest" | "linear"; /** @internal */ export declare enum DEPRECATED_SCALE_MODES { NEAREST = "nearest", LINEAR = "linear" } /** * The scale modes that are supported by pixi. * @deprecated since 8.0.0 * @category rendering * @see SCALE_MODE * @advanced */ export declare const SCALE_MODES: typeof DEPRECATED_SCALE_MODES; /** * The compare function types used for comparing values in various operations. * @category rendering * @advanced */ export type COMPARE_FUNCTION = "never" | "less" | "equal" | "less-equal" | "greater" | "not-equal" | "greater-equal" | "always"; /** * The options for the texture style. * @category rendering * @advanced */ export interface TextureStyleOptions extends Partial { /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! */ addressMode?: WRAP_MODE; /** specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeU?: WRAP_MODE; /** specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeV?: WRAP_MODE; /** Specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeW?: WRAP_MODE; /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ scaleMode?: SCALE_MODE; /** specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ magFilter?: SCALE_MODE; /** specifies the sampling behavior when the sample footprint is larger than one texel. */ minFilter?: SCALE_MODE; /** specifies behavior for sampling between mipmap levels. */ mipmapFilter?: SCALE_MODE; /** specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ lodMinClamp?: number; /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. 
*/ lodMaxClamp?: number; /** * When provided the sampler will be a comparison sampler with the specified * {@link COMPARE_FUNCTION}. * Note: Comparison samplers may use filtering, but the sampling results will be * implementation-dependent and may differ from the normal filtering rules. */ compare?: COMPARE_FUNCTION; /** * Specifies the maximum anisotropy value clamp used by the sampler. * Note: Most implementations support {@link TextureStyle#maxAnisotropy} values in range * between 1 and 16, inclusive. The used value of {@link TextureStyle#maxAnisotropy} will * be clamped to the maximum value that the platform supports. * * setting this to anything higher than 1 will set scale modes to 'linear' */ maxAnisotropy?: number; } /** * A texture style describes how a texture should be sampled by a shader. * @category rendering * @advanced */ export declare class TextureStyle extends EventEmitter<{ change: TextureStyle; destroy: TextureStyle; }> implements BindResource { /** @internal */ _resourceType: string; /** @internal */ _touched: number; private _sharedResourceId; /** default options for the style */ static readonly defaultOptions: TextureStyleOptions; /** */ addressModeU?: WRAP_MODE; /** */ addressModeV?: WRAP_MODE; /** Specifies the {{GPUAddressMode|address modes}} for the texture width, height, and depth coordinates, respectively. */ addressModeW?: WRAP_MODE; /** Specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ magFilter?: SCALE_MODE; /** Specifies the sampling behavior when the sample footprint is larger than one texel. */ minFilter?: SCALE_MODE; /** Specifies behavior for sampling between mipmap levels. */ mipmapFilter?: SCALE_MODE; /** */ lodMinClamp?: number; /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ lodMaxClamp?: number; /** * When provided the sampler will be a comparison sampler with the specified * {@link COMPARE_FUNCTION}. 
* Note: Comparison samplers may use filtering, but the sampling results will be * implementation-dependent and may differ from the normal filtering rules. */ compare?: COMPARE_FUNCTION; /** * Specifies the maximum anisotropy value clamp used by the sampler. * Note: Most implementations support {@link TextureStyle#maxAnisotropy} values in range * between 1 and 16, inclusive. The used value of {@link TextureStyle#maxAnisotropy} will * be clamped to the maximum value that the platform supports. * @internal */ _maxAnisotropy?: number; /** * Has the style been destroyed? * @readonly */ destroyed: boolean; /** * @param options - options for the style */ constructor(options?: TextureStyleOptions); set addressMode(value: WRAP_MODE); /** setting this will set wrapModeU,wrapModeV and wrapModeW all at once! */ get addressMode(): WRAP_MODE; set wrapMode(value: WRAP_MODE); get wrapMode(): WRAP_MODE; set scaleMode(value: SCALE_MODE); /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ get scaleMode(): SCALE_MODE; /** Specifies the maximum anisotropy value clamp used by the sampler. */ set maxAnisotropy(value: number); get maxAnisotropy(): number; get _resourceId(): number; update(): void; private _generateResourceId; /** Destroys the style */ destroy(): void; } /** * Options for creating a CanvasSource. * @category rendering * @advanced */ export interface CanvasSourceOptions extends TextureSourceOptions { /** * Should the canvas be resized to preserve its screen width and height regardless * of the resolution of the renderer, this is only supported for HTMLCanvasElement * and will be ignored if the canvas is an OffscreenCanvas. */ autoDensity?: boolean; /** if true, this canvas will be set up to be transparent where possible */ transparent?: boolean; } /** * A texture source that uses a canvas as its resource. * It automatically resizes the canvas based on the width, height, and resolution. * It also provides a 2D rendering context for drawing. 
* @category rendering * @advanced */ export declare class CanvasSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; autoDensity: boolean; transparent: boolean; private _context2D; constructor(options: CanvasSourceOptions); resizeCanvas(): void; resize(width?: number, height?: number, resolution?: number): boolean; static test(resource: any): resource is ICanvas; /** * Returns the 2D rendering context for the canvas. * Caches the context after creating it. * @returns The 2D rendering context of the canvas. */ get context2D(): CanvasRenderingContext2D; } /** * The type of image-like resource that can be used as a texture source. * * - `ImageBitmap` is used for bitmap images. * - `HTMLCanvasElement` and `OffscreenCanvas` are used for canvas elements. * - `ICanvas` is an interface for canvas-like objects. * - `VideoFrame` is used for video frames. * - `HTMLImageElement` is used for HTML image elements. * - `HTMLVideoElement` is used for HTML video elements. * @category rendering * @advanced */ export type ImageResource = ImageBitmap | HTMLCanvasElement | OffscreenCanvas | ICanvas | VideoFrame | HTMLImageElement | HTMLVideoElement; /** * A texture source that uses an image-like resource as its resource. * It can handle HTMLImageElement, ImageBitmap, VideoFrame, and HTMLVideoElement. * It is used for textures that can be uploaded to the GPU. * @category rendering * @advanced */ export declare class ImageSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; constructor(options: TextureSourceOptions); static test(resource: any): resource is ImageResource; } /** * The type of resource or options that can be used to create a texture source. * This includes ImageResource, TextureSourceOptions, BufferSourceOptions, and CanvasSourceOptions. 
* @category rendering * @advanced */ export type TextureResourceOrOptions = ImageResource | TextureSourceOptions | BufferSourceOptions | CanvasSourceOptions; /** * @param options * @deprecated since v8.2.0 * @see TextureSource.from * @category rendering * @internal */ export declare function autoDetectSource(options?: TextureResourceOrOptions): TextureSource; /** * @param options * @param skipCache * @internal */ export declare function resourceToTexture(options?: TextureResourceOrOptions, skipCache?: boolean): Texture; /** * Helper function that creates and returns a Texture based on the source you provide. * The source should be loaded and ready to go. If not, it's best to grab the asset using Assets. * @param id - String or Source to create texture from * @param skipCache - Skip adding the texture to the cache * @returns The texture based on the Id provided * @category utils * @internal */ export declare function textureFrom(id: TextureSourceLike, skipCache?: boolean): Texture; /** * options for creating a new TextureSource * @category rendering * @advanced */ export interface TextureSourceOptions = any> extends TextureStyleOptions { /** * the resource that will be uploaded to the GPU. This is where we get our pixels from * eg an ImageBitmap / Canvas / Video etc */ resource?: T; /** the pixel width of this texture source. This is the REAL pure number, not accounting resolution */ width?: number; /** the pixel height of this texture source. This is the REAL pure number, not accounting resolution */ height?: number; /** the resolution of the texture. */ resolution?: number; /** the format that the texture data has */ format?: TEXTURE_FORMATS; /** * Used by internal textures * @ignore */ sampleCount?: number; /** * Only really affects RenderTextures. * Should we use antialiasing for this texture. It will look better, but may impact performance as a * Blit operation will be required to resolve the texture. 
*/ antialias?: boolean; /** how many dimensions does this texture have? currently v8 only supports 2d */ dimensions?: TEXTURE_DIMENSIONS; /** The number of mip levels to generate for this texture. this is overridden if autoGenerateMipmaps is true */ mipLevelCount?: number; /** * Should we auto generate mipmaps for this texture? This will automatically generate mipmaps * for this texture when uploading to the GPU. Mipmapped textures take up more memory, but * can look better when scaled down. * * For performance reasons, it is recommended to NOT use this with RenderTextures, as they are often updated every frame. * If you do, make sure to call `updateMipmaps` after you update the texture. */ autoGenerateMipmaps?: boolean; /** the alpha mode of the texture */ alphaMode?: ALPHA_MODES; /** optional label, can be used for debugging */ label?: string; /** If true, the Garbage Collector will unload this texture if it is not used after a period of time */ autoGarbageCollect?: boolean; } /** * A TextureSource stores the information that represents an image. * All textures require a TextureSource, which contains information about the source. * Therefore you can have many textures all using a single TextureSource (eg a sprite sheet) * * This class is extended depending on the source of the texture. * Eg if you are using an image as your resource, then an ImageSource is used. * @category rendering * @advanced */ export declare class TextureSource = any> extends EventEmitter<{ change: BindResource; update: TextureSource; unload: TextureSource; destroy: TextureSource; resize: TextureSource; styleChange: TextureSource; updateMipmaps: TextureSource; error: Error; }> implements BindResource { protected readonly options: TextureSourceOptions; /** The default options used when creating a new TextureSource. 
override these to add your own defaults */ static defaultOptions: TextureSourceOptions; /** unique id for this Texture source */ readonly uid: number; /** optional label, can be used for debugging */ label: string; /** * The resource type used by this TextureSource. This is used by the bind groups to determine * how to handle this resource. * @internal */ readonly _resourceType = "textureSource"; /** * a unique resource id, used by the bind group systems. * This can change if the texture is resized or its resource changes * @internal */ _resourceId: number; /** * this is how the backends know how to upload this texture to the GPU * It changes depending on the resource type. Classes that extend TextureSource * should override this property. * @internal */ uploadMethodId: string; /** @internal */ _resolution: number; /** the pixel width of this texture source. This is the REAL pure number, not accounting resolution */ pixelWidth: number; /** the pixel height of this texture source. This is the REAL pure number, not accounting resolution */ pixelHeight: number; /** * the width of this texture source, accounting for resolution * eg pixelWidth 200, resolution 2, then width will be 100 */ width: number; /** * the height of this texture source, accounting for resolution * eg pixelHeight 200, resolution 2, then height will be 100 */ height: number; /** * the resource that will be uploaded to the GPU. This is where we get our pixels from * eg an ImageBitmap / Canvas / Video etc */ resource: T; /** * The number of samples of a multisample texture. This is always 1 for non-multisample textures. * To enable multisample for a texture, set antialias to true * @internal */ sampleCount: number; /** The number of mip levels to generate for this texture. this is overridden if autoGenerateMipmaps is true */ mipLevelCount: number; /** * Should we auto generate mipmaps for this texture? This will automatically generate mipmaps * for this texture when uploading to the GPU. 
Mipmapped textures take up more memory, but * can look better when scaled down. * * For performance reasons, it is recommended to NOT use this with RenderTextures, as they are often updated every frame. * If you do, make sure to call `updateMipmaps` after you update the texture. */ autoGenerateMipmaps: boolean; /** the format that the texture data has */ format: TEXTURE_FORMATS; /** how many dimensions does this texture have? currently v8 only supports 2d */ dimension: TEXTURE_DIMENSIONS; /** the alpha mode of the texture */ alphaMode: ALPHA_MODES; private _style; /** * Only really affects RenderTextures. * Should we use antialiasing for this texture. It will look better, but may impact performance as a * Blit operation will be required to resolve the texture. */ antialias: boolean; /** * Has the source been destroyed? * @readonly */ destroyed: boolean; /** * Used by automatic texture Garbage Collection, stores last GC tick when it was bound * @protected */ _touched: number; /** * Used by the batcher to build texture batches. faster to have the variable here! * @protected */ _batchTick: number; /** * A temporary batch location for the texture batching. Here for performance reasons only! * @protected */ _textureBindLocation: number; isPowerOfTwo: boolean; /** If true, the Garbage Collector will unload this texture if it is not used after a period of time */ autoGarbageCollect: boolean; /** * used internally to know where a texture came from. Usually assigned by the asset loader! * @ignore */ _sourceOrigin: string; /** * @param options - options for creating a new TextureSource */ constructor(options?: TextureSourceOptions); /** returns itself */ get source(): TextureSource; /** the style of the texture */ get style(): TextureStyle; set style(value: TextureStyle); /** Specifies the maximum anisotropy value clamp used by the sampler. 
*/ set maxAnisotropy(value: number); get maxAnisotropy(): number; /** setting this will set wrapModeU, wrapModeV and wrapModeW all at once! */ get addressMode(): WRAP_MODE; set addressMode(value: WRAP_MODE); /** setting this will set wrapModeU, wrapModeV and wrapModeW all at once! */ get repeatMode(): WRAP_MODE; set repeatMode(value: WRAP_MODE); /** Specifies the sampling behavior when the sample footprint is smaller than or equal to one texel. */ get magFilter(): SCALE_MODE; set magFilter(value: SCALE_MODE); /** Specifies the sampling behavior when the sample footprint is larger than one texel. */ get minFilter(): SCALE_MODE; set minFilter(value: SCALE_MODE); /** Specifies behavior for sampling between mipmap levels. */ get mipmapFilter(): SCALE_MODE; set mipmapFilter(value: SCALE_MODE); /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ get lodMinClamp(): number; set lodMinClamp(value: number); /** Specifies the minimum and maximum levels of detail, respectively, used internally when sampling a texture. */ get lodMaxClamp(): number; set lodMaxClamp(value: number); private _onStyleChange; /** call this if you have modified the texture outside of the constructor */ update(): void; /** Destroys this texture source */ destroy(): void; /** * This will unload the Texture source from the GPU. This will free up the GPU memory * As soon as it is required for rendering, it will be re-uploaded. */ unload(): void; /** the width of the resource. This is the REAL pure number, not accounting resolution */ get resourceWidth(): number; /** the height of the resource. This is the REAL pure number, not accounting resolution */ get resourceHeight(): number; /** * the resolution of the texture. Changing this number will not change the number of pixels in the actual texture * but will change the size of the texture when rendered. 
* * changing the resolution of this texture to 2 for example will make it appear twice as small when rendered (as pixel * density will have increased) */ get resolution(): number; set resolution(resolution: number); /** * Resize the texture, this is handy if you want to use the texture as a render texture * @param width - the new width of the texture * @param height - the new height of the texture * @param resolution - the new resolution of the texture * @returns - if the texture was resized */ resize(width?: number, height?: number, resolution?: number): boolean; /** * Lets the renderer know that this texture has been updated and its mipmaps should be re-generated. * This is only important for RenderTexture instances, as standard Texture instances will have their * mipmaps generated on upload. You should call this method after you make any change to the texture * * The reason for this is it can be quite expensive to update mipmaps for a texture. So by default, * we want you, the developer, to specify when this action should happen. * * Generally you don't want to have mipmaps generated on Render targets that are changed every frame, */ updateMipmaps(): void; set wrapMode(value: WRAP_MODE); get wrapMode(): WRAP_MODE; set scaleMode(value: SCALE_MODE); /** setting this will set magFilter,minFilter and mipmapFilter all at once! */ get scaleMode(): SCALE_MODE; /** * Refresh check for isPowerOfTwo texture based on size * @private */ protected _refreshPOT(): void; static test(_resource: any): any; /** * A helper function that creates a new TextureSource based on the resource you provide. * @param resource - The resource to create the texture source from. */ static from: (resource: TextureResourceOrOptions) => TextureSource; } /** * Buffer usage flags. They can be combined using the bitwise OR operator * eg : BufferUsage.VERTEX | BufferUsage.INDEX * @category rendering * @advanced */ export declare enum BufferUsage { /** * The buffer can be mapped for reading. 
(Example: calling mapAsync() with GPUMapMode.READ) * May only be combined with COPY_DST. */ MAP_READ = 1, /** * The buffer can be mapped for writing. (Example: calling mapAsync() with GPUMapMode.WRITE) * May only be combined with COPY_SRC. */ MAP_WRITE = 2, /** * The buffer can be used as the source of a copy operation. * (Examples: as the source argument of a copyBufferToBuffer() or copyBufferToTexture() call.) */ COPY_SRC = 4, /** * The buffer can be used as the destination of a copy or write operation. * (Examples: as the destination argument of a copyBufferToBuffer() or * copyTextureToBuffer() call, or as the target of a writeBuffer() call.) */ COPY_DST = 8, /** The buffer can be used as an index buffer. (Example: passed to setIndexBuffer().) */ INDEX = 16, /** The buffer can be used as a vertex buffer. (Example: passed to setVertexBuffer().) */ VERTEX = 32, /** * The buffer can be used as a uniform buffer. * (Example: as a bind group entry for a GPUBufferBindingLayout with a buffer.type of "uniform".) */ UNIFORM = 64, /** * The buffer can be used as a storage buffer. * (Example: as a bind group entry for a GPUBufferBindingLayout with a buffer.type of "storage" or "read-only-storage".) */ STORAGE = 128, /** * The buffer can be used as to store indirect command arguments. * (Examples: as the indirectBuffer argument of a drawIndirect() or dispatchWorkgroupsIndirect() call.) */ INDIRECT = 256, /** * The buffer can be used to capture query results. * (Example: as the destination argument of a resolveQuerySet() call.) */ QUERY_RESOLVE = 512, /** the buffer will not be updated frequently */ STATIC = 1024 } /** * All the various typed arrays that exist in js * @category rendering * @advanced */ export type TypedArray = Int8Array | Uint8Array | Int16Array | Uint16Array | Int32Array | Uint32Array | Uint8ClampedArray | Float32Array | Float64Array; /** * Options for creating a buffer * * This interface defines the options that can be passed to the Buffer constructor. 
* It includes the data to initialize the buffer with, the size of the buffer, * the usage of the buffer, a label for debugging, and whether the buffer should shrink to fit * when the data becomes smaller. * @category rendering * @advanced */ export interface BufferOptions { /** * the data to initialize the buffer with, this can be a typed array, * or a regular number array. If it is a number array, it will be converted to a Float32Array */ data?: TypedArray | number[]; /** the size of the buffer in bytes, if not supplied, it will be inferred from the data */ size?: number; /** the usage of the buffer, see {@link BufferUsage} */ usage: number; /** a label for the buffer, this is useful for debugging */ label?: string; /** * should the GPU buffer be shrunk when the data becomes smaller? * changing this will cause the buffer to be destroyed and a new one created on the GPU * this can be expensive, especially if the buffer is already big enough! * setting this to false will prevent the buffer from being shrunk. This will yield better performance * if you are constantly setting data that is changing size often. * @default true */ shrinkToFit?: boolean; } /** @internal */ export interface BufferDescriptor { label?: string; size: GPUSize64; usage: BufferUsage; mappedAtCreation?: boolean; } /** * A wrapper for a WebGPU/WebGL Buffer. * In PixiJS, the Buffer class is used to manage the data that is sent to the GPU rendering pipeline. * It abstracts away the underlying GPU buffer and provides an interface for uploading typed arrays or other data to the GPU, * They are used in the following places: *

* .1. {@link Geometry} as attribute data or index data for geometry *
* .2. {@link UniformGroup} as an underlying buffer for uniform data *
* .3. {@link BufferResource} as an underlying part of a buffer used directly by the GPU program *
* * It is important to note that you must provide a usage type when creating a buffer. This is because * the underlying GPU buffer needs to know how it will be used. For example, if you are creating a buffer * to hold vertex data, you would use `BufferUsage.VERTEX`. This will tell the GPU that this buffer will be * used as a vertex buffer. This is important because it will affect how you can use the buffer. * * Buffers are updated by calling the {@link Buffer.update} method. This immediately updates the buffer on the GPU. * Be mindful of calling this more often than you need to. It is recommended to update buffers only when needed. * * In WebGPU, a GPU buffer cannot resized. This limitation is abstracted away, but know that resizing a buffer means * creating a brand new one and destroying the old, so it is best to limit this if possible. * @example * * const buffer = new Buffer({ * data: new Float32Array([1, 2, 3, 4]), * usage: BufferUsage.VERTEX, * }); * @category rendering * @advanced */ declare class Buffer$1 extends EventEmitter<{ change: BindResource; update: Buffer$1; destroy: Buffer$1; }> implements BindResource { /** * emits when the underlying buffer has changed shape (i.e. resized) * letting the renderer know that it needs to discard the old buffer on the GPU and create a new one * @event change */ /** * emits when the underlying buffer data has been updated. letting the renderer know * that it needs to update the buffer on the GPU * @event update */ /** * emits when the buffer is destroyed. 
letting the renderer know that it needs to destroy the buffer on the GPU * @event destroy */ /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** * a resource type, used to identify how to handle it when its in a bind group / shader resource * @internal */ readonly _resourceType = "buffer"; /** * the resource id used internally by the renderer to build bind group keys * @internal */ _resourceId: number; /** * used internally to know if a uniform group was used in the last render pass * @internal */ _touched: number; /** * a description of the buffer and how it should be set up on the GPU * @internal */ readonly descriptor: BufferDescriptor; /** @internal */ _updateID: number; /** @internal */ _updateSize: number; private _data; private _dataInt32; /** * should the GPU buffer be shrunk when the data becomes smaller? * changing this will cause the buffer to be destroyed and a new one created on the GPU * this can be expensive, especially if the buffer is already big enough! * setting this to false will prevent the buffer from being shrunk. This will yield better performance * if you are constantly setting data that is changing size often. * @default true */ shrinkToFit: boolean; /** * Has the buffer been destroyed? * @readonly */ destroyed: boolean; /** * Creates a new Buffer with the given options * @param options - the options for the buffer */ constructor(options: BufferOptions); /** the data in the buffer */ get data(): TypedArray; set data(value: TypedArray); get dataInt32(): Int32Array; /** whether the buffer is static or not */ get static(): boolean; set static(value: boolean); /** * Sets the data in the buffer to the given value. This will immediately update the buffer on the GPU. * If you only want to update a subset of the buffer, you can pass in the size of the data. * @param value - the data to set * @param size - the size of the data in bytes * @param syncGPU - should the buffer be updated on the GPU immediately? 
*/ setDataWithSize(value: TypedArray, size: number, syncGPU: boolean): void; /** * updates the buffer on the GPU to reflect the data in the buffer. * By default it will update the entire buffer. If you only want to update a subset of the buffer, * you can pass in the size of the buffer to update. * @param sizeInBytes - the new size of the buffer in bytes */ update(sizeInBytes?: number): void; /** Destroys the buffer */ destroy(): void; } /** * Options for creating a BufferImageSource. * @category rendering * @advanced */ export interface BufferSourceOptions extends TextureSourceOptions { width: number; height: number; } /** * A texture source that uses a TypedArray or ArrayBuffer as its resource. * It automatically determines the format based on the type of TypedArray provided. * @category rendering * @advanced */ export declare class BufferImageSource extends TextureSource { static extension: ExtensionMetadata; uploadMethodId: string; constructor(options: BufferSourceOptions); static test(resource: any): resource is TypedArray | ArrayBuffer; } /** * Class controls uv mapping from Texture normal space to BaseTexture normal space. * * Takes `trim` and `rotate` into account. May contain clamp settings for Meshes and TilingSprite. * * Can be used in Texture `uvMatrix` field, or separately, you can use different clamp settings on the same texture. * If you want to add support for texture region of certain feature or filter, that's what you're looking for. * * Keeps track of Texture changes through `_lastTextureID` private field. * Use `update()` method call to track it from outside. 
* @see Texture * @see Mesh * @see TilingSprite * @category rendering * @advanced */ export declare class TextureMatrix { /** * Matrix operation that converts texture region coords to texture coords * @readonly */ mapCoord: Matrix; /** * Changes frame clamping * Works with TilingSprite and Mesh * Change to 1.5 if your texture has repeated right and bottom lines, that leads to smoother borders * @default 0 */ clampOffset: number; /** * Changes frame clamping * Works with TilingSprite and Mesh * Change to -0.5 to add a pixel to the edge, recommended for transparent trimmed textures in atlas * @default 0.5 */ clampMargin: number; /** * Clamp region for normalized coords, left-top pixel center in xy, bottom-right in zw. * Calculated based on clampOffset. */ readonly uClampFrame: Float32Array; /** Normalized clamp offset. Calculated based on clampOffset. */ readonly uClampOffset: Float32Array; /** * Tracks Texture frame changes. * @ignore */ _updateID: number; /** * Tracks Texture frame changes. * @protected */ protected _textureID: number; protected _texture: Texture; /** * If texture size is the same as baseTexture. * @default false * @readonly */ isSimple: boolean; /** * @param texture - observed texture * @param clampMargin - Changes frame clamping, 0.5 by default. Use -0.5 for extra border. */ constructor(texture: Texture, clampMargin?: number); /** Texture property. */ get texture(): Texture; set texture(value: Texture); /** * Multiplies uvs array to transform * @param uvs - mesh uvs * @param [out=uvs] - output * @returns - output */ multiplyUvs(uvs: Float32Array, out?: Float32Array): Float32Array; /** * Updates matrices if texture was changed * @returns - whether or not it was updated */ update(): boolean; } /** * Stores the width of the non-scalable borders, for example when used with {@link NineSlicePlane} texture. 
* @category rendering * @advanced */ export interface TextureBorders { /** left border in pixels */ left: number; /** top border in pixels */ top: number; /** right border in pixels */ right: number; /** bottom border in pixels */ bottom: number; } /** * The UVs data structure for a texture. * @category rendering * @advanced */ export type UVs = { x0: number; y0: number; x1: number; y1: number; x2: number; y2: number; x3: number; y3: number; }; /** * The options that can be passed to a new Texture * @category rendering * @standard */ export interface TextureOptions { /** the underlying texture data that this texture will use */ source?: TextureSourceType; /** optional label, for debugging */ label?: string; /** The rectangle frame of the texture to show */ frame?: Rectangle; /** The area of original texture */ orig?: Rectangle; /** Trimmed rectangle of original texture */ trim?: Rectangle; /** Default anchor point used for sprite placement / rotation */ defaultAnchor?: { x: number; y: number; }; /** Default borders used for 9-slice scaling {@link NineSlicePlane}*/ defaultBorders?: TextureBorders; /** indicates how the texture was rotated by texture packer. See {@link groupD8} */ rotate?: number; /** * Set to true if you plan on modifying this texture's frame, UVs, or swapping its source at runtime. * This is false by default as it improves performance. Generally, it's recommended to create new * textures and swap those rather than modifying an existing texture's properties unless you are * working with dynamic frames. * Not setting this to true when modifying the texture can lead to visual artifacts. * * If this is false and you modify the texture, you can manually update the sprite's texture by calling * `sprite.onViewUpdate()`. */ dynamic?: boolean; } /** * A texture that can be bound to a shader as it has a texture source. 
* @category rendering * @advanced */ export interface BindableTexture { source: TextureSource; } /** * A texture source can be a string, an image, a video, a canvas, or a texture resource. * @category rendering * @advanced * @see {@link TextureSource} * @see {@link TextureResourceOrOptions} * @see {@link Texture.from} */ export type TextureSourceLike = TextureSource | TextureResourceOrOptions | string; /** * A texture stores the information that represents an image or part of an image. * * A texture must have a loaded resource passed to it to work. It does not contain any * loading mechanisms. * * The Assets class can be used to load a texture from a file. This is the recommended * way as it will handle the loading and caching for you. * * ```js * * const texture = await Assets.load('assets/image.png'); * * // once Assets has loaded the image it will be available via the from method * const sameTexture = Texture.from('assets/image.png'); * // another way to access the texture once loaded * const sameAgainTexture = Assets.get('assets/image.png'); * * const sprite1 = new Sprite(texture); * * ``` * * It cannot be added to the display list directly; instead use it as the texture for a Sprite. * If no frame is provided for a texture, then the whole image is used. * * You can directly create a texture from an image and then reuse it multiple times like this : * * ```js * import { Sprite, Texture } from 'pixi.js'; * * const texture = await Assets.load('assets/image.png'); * const sprite1 = new Sprite(texture); * const sprite2 = new Sprite(texture); * ``` * * If you didn't pass the texture frame to constructor, it enables `noFrame` mode: * it subscribes on baseTexture events, it automatically resizes at the same time as baseTexture. 
* @category rendering * @class * @standard */ export declare class Texture extends EventEmitter<{ update: Texture; destroy: Texture; }> implements BindableTexture { /** * Helper function that creates and returns a Texture based on the source you provide. * The source should be loaded and ready to go. If not, it's best to grab the asset using Assets. * @param id - String or Source to create texture from * @param skipCache - Skip adding the texture to the cache * @returns The texture based on the Id provided */ static from: (id: TextureSourceLike, skipCache?: boolean) => Texture; /** label used for debugging */ label?: string; /** unique id for this texture */ readonly uid: number; /** * Has the texture been destroyed? * @readonly */ destroyed: boolean; /** @internal */ _source: TextureSourceType; /** * Indicates whether the texture is rotated inside the atlas * set to 2 to compensate for texture packer rotation * set to 6 to compensate for spine packer rotation * can be used to rotate or mirror sprites * See {@link groupD8} for explanation */ readonly rotate: number; /** A uvs object based on the given frame and the texture source */ readonly uvs: UVs; /** * Anchor point that is used as default if sprite is created with this texture. * Changing the `defaultAnchor` at a later point of time will not update Sprite's anchor point. * @default {0,0} */ readonly defaultAnchor?: { x: number; y: number; }; /** * Default width of the non-scalable border that is used if 9-slice plane is created with this texture. * @since 7.2.0 * @see NineSliceSprite */ readonly defaultBorders?: TextureBorders; /** * This is the area of the BaseTexture image to actually copy to the Canvas / WebGL when rendering, * irrespective of the actual frame size or placement (which can be influenced by trimmed texture atlases) */ readonly frame: Rectangle; /** This is the area of original texture, before it was put in atlas. 
*/ readonly orig: Rectangle; /** * This is the trimmed area of original texture, before it was put in atlas * Please call `updateUvs()` after you change coordinates of `trim` manually. */ readonly trim: Rectangle; /** * Does this Texture have any frame data assigned to it? * * This mode is enabled automatically if no frame was passed inside constructor. * * In this mode texture is subscribed to baseTexture events, and fires `update` on any change. * * Beware, after loading or resize of baseTexture event can be fired two times! * If you want more control, subscribe on baseTexture itself. * @example * texture.on('update', () => {}); */ noFrame: boolean; /** * Set to true if you plan on modifying the uvs of this texture. * When this is the case, sprites and other objects using the texture will * make sure to listen for changes to the uvs and update their vertices accordingly. */ dynamic: boolean; private _textureMatrix; /** is it a texture? yes! used for type checking */ readonly isTexture = true; /** * @param {TextureOptions} options - Options for the texture */ constructor({ source, label, frame, orig, trim, defaultAnchor, defaultBorders, rotate, dynamic }?: TextureOptions); set source(value: TextureSourceType); /** the underlying source of the texture (equivalent of baseTexture in v7) */ get source(): TextureSourceType; /** returns a TextureMatrix instance for this texture. By default, that object is not created because it's heavy. */ get textureMatrix(): TextureMatrix; /** The width of the Texture in pixels. */ get width(): number; /** The height of the Texture in pixels. */ get height(): number; /** Call this function when you have modified the frame of this texture. */ updateUvs(): void; /** * Destroys this texture * @param destroySource - Destroy the source when the texture is destroyed. */ destroy(destroySource?: boolean): void; /** * Call this if you have modified the texture outside of the constructor. 
* * If you have modified this texture's source, you must separately call `texture.source.update()` to see those changes. */ update(): void; /** @deprecated since 8.0.0 */ get baseTexture(): TextureSource; /** an Empty Texture used internally by the engine */ static EMPTY: Texture; /** a White texture used internally by the engine */ static WHITE: Texture; } /** * A render texture, extends `Texture`. * @see {@link Texture} * @category rendering * @advanced */ export declare class RenderTexture extends Texture { static create(options: TextureSourceOptions): RenderTexture; /** * Resizes the render texture. * @param width - The new width of the render texture. * @param height - The new height of the render texture. * @param resolution - The new resolution of the render texture. * @returns This texture. */ resize(width: number, height: number, resolution?: number): this; } /** * Options for generating a texture source. * @category rendering * @advanced * @interface */ export type GenerateTextureSourceOptions = Omit; /** * Options for generating a texture from a container. * Used to create reusable textures from display objects, which can improve performance * when the same content needs to be rendered multiple times. * @example * ```ts * // Basic texture generation * const sprite = new Sprite(texture); * const generatedTexture = renderer.generateTexture({ * target: sprite * }); * * // Generate with custom region and resolution * const texture = renderer.generateTexture({ * target: container, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2 * }); * * // Generate with background color and anti-aliasing * const highQualityTexture = renderer.generateTexture({ * target: graphics, * clearColor: '#ff0000', * antialias: true, * textureSourceOptions: { * scaleMode: 'linear' * } * }); * ``` * @category rendering * @advanced */ export type GenerateTextureOptions = { /** * The container to generate the texture from. 
* This can be any display object like Sprite, Container, or Graphics. * @example * ```ts * const graphics = new Graphics() * .circle(0, 0, 50) * .fill('red'); * * const texture = renderer.generateTexture({ * target: graphics * }); * ``` */ target: Container; /** * The region of the container that should be rendered. * If not specified, defaults to the local bounds of the container. * @example * ```ts * // Extract only a portion of the container * const texture = renderer.generateTexture({ * target: container, * frame: new Rectangle(10, 10, 100, 100) * }); * ``` */ frame?: Rectangle; /** * The resolution of the texture being generated. * Higher values create sharper textures at the cost of memory. * @default renderer.resolution * @example * ```ts * // Generate a high-resolution texture * const hiResTexture = renderer.generateTexture({ * target: sprite, * resolution: 2 // 2x resolution * }); * ``` */ resolution?: number; /** * The color used to clear the texture before rendering. * Can be a hex number, string, or array of numbers. * @example * ```ts * // Clear with red background * const texture = renderer.generateTexture({ * target: sprite, * clearColor: '#ff0000' * }); * * // Clear with semi-transparent black * const texture = renderer.generateTexture({ * target: sprite, * clearColor: [0, 0, 0, 0.5] * }); * ``` */ clearColor?: ColorSource; /** * Whether to enable anti-aliasing. This may affect performance. * @default false * @example * ```ts * // Generate a smooth texture * const texture = renderer.generateTexture({ * target: graphics, * antialias: true * }); * ``` */ antialias?: boolean; /** * Advanced options for configuring the texture source. * Controls texture properties like scale mode and filtering. 
* @advanced * @example * ```ts * const texture = renderer.generateTexture({ * target: sprite, * textureSourceOptions: { * scaleMode: 'linear', * multisample: 4 * } * }); * ``` */ textureSourceOptions?: GenerateTextureSourceOptions; }; /** * System that manages the generation of textures from display objects in the renderer. * This system is responsible for creating reusable textures from containers, sprites, and other display objects. * Available through `renderer.textureGenerator`. * @example * ```ts * import { Application, Sprite, Graphics } from 'pixi.js'; * * const app = new Application(); * await app.init(); * * // Create a complex display object * const container = new Container(); * * const graphics = new Graphics() * .circle(0, 0, 50) * .fill('red'); * * const sprite = new Sprite(texture); * sprite.x = 100; * * container.addChild(graphics, sprite); * * // Generate a texture from the container * const generatedTexture = app.renderer.textureGenerator.generateTexture({ * target: container, * resolution: 2, * antialias: true * }); * * // Use the generated texture * const newSprite = new Sprite(generatedTexture); * app.stage.addChild(newSprite); * * // Clean up when done * generatedTexture.destroy(true); * ``` * * Features: * - Convert any display object to a texture * - Support for custom regions and resolutions * - Anti-aliasing support * - Background color configuration * - Texture source options customization * * Common Use Cases: * - Creating texture atlases dynamically * - Caching complex container content * - Generating thumbnails * - Creating reusable textures from rendered content * * Performance Considerations: * - Generating textures is relatively expensive * - Cache results when possible * - Be mindful of resolution and size * - Clean up unused textures * @see {@link GenerateTextureOptions} For detailed texture generation options * @see {@link AbstractRenderer.generateTexture} For the main renderer method * @see {@link RenderTexture} For the 
resulting texture type * @category rendering * @standard */ export declare class GenerateTextureSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "textureGenerator"; }; private readonly _renderer; constructor(renderer: Renderer); /** * Creates a texture from a display object that can be used for creating sprites and other textures. * This is particularly useful for optimizing performance when a complex container needs to be reused. * @param options - Generate texture options or a container to convert to texture * @returns A new RenderTexture containing the rendered display object * @example * ```ts * // Basic usage with a container * const container = new Container(); * container.addChild( * new Graphics() * .circle(0, 0, 50) * .fill('red') * ); * * const texture = renderer.textureGenerator.generateTexture(container); * * // Advanced usage with options * const texture = renderer.textureGenerator.generateTexture({ * target: container, * frame: new Rectangle(0, 0, 100, 100), // Specific region * resolution: 2, // High DPI * clearColor: '#ff0000', // Red background * antialias: true // Smooth edges * }); * * // Create a sprite from the generated texture * const sprite = new Sprite(texture); * * // Clean up when done * texture.destroy(true); * ``` * @see {@link GenerateTextureOptions} For detailed texture generation options * @see {@link RenderTexture} For the type of texture created * @category rendering */ generateTexture(options: GenerateTextureOptions | Container): RenderTexture; destroy(): void; } /** * An effect that can be applied to a container. This is used to create effects such as filters/masks etc. 
* @category rendering * @advanced */ export interface Effect { pipe: string; priority: number; addBounds?(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds?(bounds: Bounds, localRoot: Container): void; containsPoint?(point: PointData, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; } /** * The constructor for an Effect. * It is used to create instances of effects that can be applied to containers. * @param options - The options for the effect. * @returns A new instance of the effect. * @category rendering * @advanced */ export interface EffectConstructor { new (options?: any): Effect; test?(options: any): boolean; } /** * Flexible wrapper around `ArrayBuffer` that also provides typed array views on demand. * @category utils * @advanced */ export declare class ViewableBuffer { /** The size of the buffer in bytes. */ size: number; /** Underlying `ArrayBuffer` that holds all the data and is of capacity `this.size`. */ rawBinaryData: ArrayBuffer; /** View on the raw binary data as a `Uint32Array`. */ uint32View: Uint32Array; /** View on the raw binary data as a `Float32Array`. */ float32View: Float32Array; /** View on the raw binary data as a `Uint16Array`. */ uint16View: Uint16Array; private _int8View; private _uint8View; private _int16View; private _int32View; private _float64Array; private _bigUint64Array; /** * @param length - The size of the buffer in bytes. */ constructor(length: number); /** * @param arrayBuffer - The source array buffer. */ constructor(arrayBuffer: ArrayBuffer); /** View on the raw binary data as a `Int8Array`. */ get int8View(): Int8Array; /** View on the raw binary data as a `Uint8Array`. */ get uint8View(): Uint8Array; /** View on the raw binary data as a `Int16Array`. */ get int16View(): Int16Array; /** View on the raw binary data as a `Int32Array`. */ get int32View(): Int32Array; /** View on the raw binary data as a `Float64Array`. 
*/ get float64View(): Float64Array; /** View on the raw binary data as a `BigUint64Array`. */ get bigUint64View(): BigUint64Array; /** * Returns the view of the given type. * @param type - One of `int8`, `uint8`, `int16`, * `uint16`, `int32`, `uint32`, and `float32`. * @returns - typed array of given type */ view(type: string): TypedArray; /** Destroys all buffer references. Do not use after calling this. */ destroy(): void; /** * Returns the size of the given type in bytes. * @param type - One of `int8`, `uint8`, `int16`, * `uint16`, `int32`, `uint32`, and `float32`. * @returns - size of the type in bytes */ static sizeOf(type: string): number; } /** * Various blend modes supported by Pixi * @category filters * @standard */ export type BLEND_MODES = "inherit" | "normal" | "add" | "multiply" | "screen" | "darken" | "lighten" | "erase" | "color-dodge" | "color-burn" | "linear-burn" | "linear-dodge" | "linear-light" | "hard-light" | "soft-light" | "pin-light" | "difference" | "exclusion" | "overlay" | "saturation" | "color" | "luminosity" | "normal-npm" | "add-npm" | "screen-npm" | "none" | "subtract" | "divide" | "vivid-light" | "hard-mix" | "negation" | "min" | "max"; /** * The map of blend modes supported by Pixi * @category rendering * @advanced */ export declare const BLEND_TO_NPM: { normal: string; add: string; screen: string; }; /** * The stencil operation to perform when using the stencil buffer * @category rendering * @advanced */ export declare enum STENCIL_MODES { DISABLED = 0, RENDERING_MASK_ADD = 1, MASK_ACTIVE = 2, INVERSE_MASK_ACTIVE = 3, RENDERING_MASK_REMOVE = 4, NONE = 5 } /** * The culling mode to use. It can be either `none`, `front` or `back`. * @category rendering * @advanced */ export type CULL_MODES = "none" | "back" | "front"; /** * Used by the batcher to build texture batches. Holds list of textures and their respective locations. * @category rendering * @advanced */ export declare class BatchTextureArray { /** Inside textures array. 
*/ textures: TextureSource[]; /** Respective locations for textures. */ ids: Record; /** Number of filled elements. */ count: number; constructor(); /** Clear the textures and their locations. */ clear(): void; } /** * The different topology types supported by the renderer used to describe how the geometry should be renderer * @category rendering * @advanced */ export type Topology = "point-list" | "line-list" | "line-strip" | "triangle-list" | "triangle-strip"; /** * @deprecated since 8.0.0 * @category rendering * @advanced */ export declare const DRAW_MODES: { POINTS: string; LINES: string; LINE_STRIP: string; TRIANGLES: string; TRIANGLE_STRIP: string; }; /** * The different types of vertex formats supported by the renderer * @category rendering * @advanced */ export type VertexFormat = "uint8x2" | "uint8x4" | "sint8x2" | "sint8x4" | "unorm8x2" | "unorm8x4" | "snorm8x2" | "snorm8x4" | "uint16x2" | "uint16x4" | "sint16x2" | "sint16x4" | "unorm16x2" | "unorm16x4" | "snorm16x2" | "snorm16x4" | "float16x2" | "float16x4" | "float32" | "float32x2" | "float32x3" | "float32x4" | "uint32" | "uint32x2" | "uint32x3" | "uint32x4" | "sint32" | "sint32x2" | "sint32x3" | "sint32x4"; /** * The index buffer array type used in geometries. * @category rendering * @advanced */ export type IndexBufferArray = Uint16Array | Uint32Array; /** * The attribute data for a geometries attributes * @category rendering * @advanced */ export interface Attribute { /** the buffer that this attributes data belongs to */ buffer: Buffer$1; /** the format of the attribute */ format?: VertexFormat; /** the stride of the data in the buffer - in bytes*/ stride?: number; /** the offset of the attribute from the buffer, defaults to 0 - in bytes*/ offset?: number; /** is this an instanced buffer? (defaults to false) */ instance?: boolean; /** the number of elements to be rendered. If not specified, all vertices after the starting vertex will be drawn. 
*/ size?: number; /** * the starting vertex in the geometry to start drawing from. If not specified, * drawing will start from the first vertex. */ start?: number; /** * attribute divisor for instanced rendering. Note: this is a **WebGL-only** feature, the WebGPU renderer will * issue a warning if one of the attributes has divisor set. */ divisor?: number; } /** * The attribute option used by the constructor for adding geometries attributes * extends {@link Attribute} but allows for the buffer to be a typed or number array * @category rendering * @advanced */ export type AttributeOption = Omit & { buffer: Buffer$1 | TypedArray | number[]; } | Buffer$1 | TypedArray | number[]; /** * The attribute options used by the constructor for adding geometries attributes * extends {@link Attribute} but allows for the buffer to be a typed or number array * @category rendering * @advanced */ export type AttributeOptions = Record; /** * the interface that describes the structure of the geometry * @category rendering * @advanced */ export interface GeometryDescriptor { /** an optional label to easily identify the geometry */ label?: string; /** the attributes that make up the geometry */ attributes?: AttributeOptions; /** optional index buffer for this geometry */ indexBuffer?: Buffer$1 | TypedArray | number[]; /** the topology of the geometry, defaults to 'triangle-list' */ topology?: Topology; instanceCount?: number; } /** * A Geometry is a low-level object that represents the structure of 2D shapes in terms of vertices and attributes. * It's a crucial component for rendering as it describes the shape and format of the data that will go through the shaders. * Essentially, a Geometry object holds the data you'd send to a GPU buffer. * * A geometry is basically made of two components: *
* Attributes: These are essentially arrays that define properties of the vertices like position, color, * texture coordinates, etc. They map directly to attributes in your vertex shaders. *
* Indices: An optional array that describes how the vertices are connected. * If not provided, vertices will be interpreted in the sequence they're given. * @example * * const geometry = new Geometry({ * attributes: { * aPosition: [ // add some positions * 0, 0, * 0, 100, * 100, 100, * 100, 0, * ], * aUv: [ // add some uvs * 0, 0, * 0, 1, * 1, 1, * 1, 0, * ] * } * }); * @category rendering * @advanced */ export declare class Geometry extends EventEmitter<{ update: Geometry; destroy: Geometry; }> { /** The topology of the geometry. */ topology: Topology; /** The unique id of the geometry. */ readonly uid: number; /** A record of the attributes of the geometry. */ readonly attributes: Record; /** The buffers that the attributes use */ readonly buffers: Buffer$1[]; /** The index buffer of the geometry */ indexBuffer: Buffer$1; /** * the layout key will be generated by WebGPU all geometries that have the same structure * will have the same layout key. This is used to cache the pipeline layout * @internal */ _layoutKey: number; /** the instance count of the geometry to draw */ instanceCount: number; private readonly _bounds; private _boundsDirty; /** * Create a new instance of a geometry * @param options - The options for the geometry. */ constructor(options?: GeometryDescriptor); protected onBufferUpdate(): void; /** * Returns the requested attribute. * @param id - The name of the attribute required * @returns - The attribute requested. */ getAttribute(id: string): Attribute; /** * Returns the index buffer * @returns - The index buffer. */ getIndex(): Buffer$1; /** * Returns the requested buffer. * @param id - The name of the buffer required. * @returns - The buffer requested. */ getBuffer(id: string): Buffer$1; /** * Used to figure out how many vertices there are in this geometry * @returns the number of vertices in the geometry */ getSize(): number; /** * Adds an attribute to the geometry. * @param name - The name of the attribute to add. 
* @param attributeOption - The attribute option to add. */ addAttribute(name: string, attributeOption: AttributeOption): void; /** * Adds an index buffer to the geometry. * @param indexBuffer - The index buffer to add. Can be a Buffer, TypedArray, or an array of numbers. */ addIndex(indexBuffer: Buffer$1 | TypedArray | number[]): void; /** Returns the bounds of the geometry. */ get bounds(): Bounds; /** * destroys the geometry. * @param destroyBuffers - destroy the buffers associated with this geometry */ destroy(destroyBuffers?: boolean): void; } /** * An instruction that can be executed by the renderer * @category rendering * @advanced */ export interface Instruction { /** the id of the render pipe that can run this instruction */ renderPipeId: string; /** the name of the instruction */ action?: string; /** true if this instruction can be compiled into a WebGPU bundle */ canBundle: boolean; } /** * This interface represents the extracted attribute data from a WebGL program. * It extends the `Attribute` interface but omits the `buffer` property. * It includes an optional `location` property that indicates where the shader location is for this attribute. 
* @category rendering * @advanced */ export interface ExtractedAttributeData extends Omit { /** set where the shader location is for this attribute */ location?: number; } /** * returns the attribute data from the program * @private * @param {WebGLProgram} [program] - the WebGL program * @param {WebGLRenderingContext} [gl] - the WebGL context * @param sortAttributes * @returns {object} the attribute data for this program */ export declare function extractAttributesFromGlProgram(program: WebGLProgram, gl: WebGLRenderingContextBase, sortAttributes?: boolean): Record; /** @internal */ export interface GlUniformData { name: string; index: number; type: string; size: number; isArray: boolean; value: any; } /** @internal */ export interface GlUniformBlockData { index: number; name: string; size: number; value?: TypedArray; } /** * The options for the gl program * @category rendering * @advanced */ export interface GlProgramOptions { /** The fragment glsl shader source. */ fragment: string; /** The vertex glsl shader source. */ vertex: string; /** the name of the program, defaults to 'pixi-program' */ name?: string; /** the preferred vertex precision for the shader, this may not be used if the device does not support it */ preferredVertexPrecision?: string; /** the preferred fragment precision for the shader, this may not be used if the device does not support it */ preferredFragmentPrecision?: string; transformFeedbackVaryings?: { names: string[]; bufferMode: "separate" | "interleaved"; }; } /** * A wrapper for a WebGL Program. You can create one and then pass it to a shader. * This will manage the WebGL program that is compiled and uploaded to the GPU. * * To get the most out of this class, you should be familiar with glsl shaders and how they work. 
* @see https://developer.mozilla.org/en-US/docs/Web/API/WebGLProgram * @example * * // Create a new program * const program = new GlProgram({ * vertex: '...', * fragment: '...', * }); * * * There are a few key things that pixi shader will do for you automatically: *
* - If no precision is provided in the shader, it will be injected into the program source for you. * This precision will be taken from the options provided, if none is provided, * then the program will default to the defaultOptions. *
* - It will inject the program name into the shader source if none is provided. *
* - It will set the program version to 300 es. * * For optimal usage and best performance, it's best to reuse programs as much as possible. * You should use the {@link GlProgram.from} helper function to create programs. * @class * @category rendering * @advanced */ export declare class GlProgram { /** The default options used by the program. */ static defaultOptions: Partial; /** the fragment glsl shader source. */ readonly fragment?: string; /** the vertex glsl shader source */ readonly vertex?: string; /** * attribute data extracted from the program once created this happens when the program is used for the first time * @internal */ _attributeData: Record; /** * uniform data extracted from the program once created this happens when the program is used for the first time * @internal */ _uniformData: Record; /** * uniform data extracted from the program once created this happens when the program is used for the first time * @internal */ _uniformBlockData: Record; /** details on how to use this program with transform feedback */ transformFeedbackVaryings?: { names: string[]; bufferMode: "separate" | "interleaved"; }; /** * the key that identifies the program via its source vertex + fragment * @internal */ readonly _key: number; /** * Creates a shiny new GlProgram. Used by WebGL renderer. * @param options - The options for the program. */ constructor(options: GlProgramOptions); /** destroys the program */ destroy(): void; /** * Helper function that creates a program for a given source. * It will check the program cache if the program has already been created. * If it has that one will be returned, if not a new one will be created and cached. * @param options - The options for the program. * @returns A program using the same source */ static from(options: GlProgramOptions): GlProgram; } /** * Defines the structure of the extracted WGSL structs and groups. 
* @category rendering * @advanced */ export interface StructsAndGroups { groups: { group: number; binding: number; name: string; isUniform: boolean; type: string; }[]; structs: { name: string; members: Record; }[]; } /** * @param wgsl * @internal */ export declare function extractStructAndGroups(wgsl: string): StructsAndGroups; /** * a WebGPU description of how the program is laid out * @see https://gpuweb.github.io/gpuweb/#gpupipelinelayout * @category rendering * @advanced */ export type ProgramPipelineLayoutDescription = GPUBindGroupLayoutEntry[][]; /** * a map that maps names of uniforms to group indexes * @category rendering * @advanced */ export type ProgramLayout = Record[]; /** * the program source * @category rendering * @advanced */ export interface ProgramSource { /** The wgsl source code of the shader. */ source: string; /** The main function to run in this shader */ entryPoint?: string; } /** * The options for the gpu program * @category rendering * @advanced */ export interface GpuProgramOptions { /** * the name of the program, this is added to the label of the GPU Program created * under the hood. Makes it much easier to debug! */ name?: string; /** The fragment wgsl shader source. */ fragment?: ProgramSource; /** The vertex wgsl shader source. */ vertex?: ProgramSource; /** The layout of the program. If not provided, it will be generated from the shader sources. */ layout?: ProgramLayout; /** The gpu layout of the program. If not provided, it will be generated from the shader sources. */ gpuLayout?: ProgramPipelineLayoutDescription; } /** * A wrapper for a WebGPU Program, specifically designed for the WebGPU renderer. * This class facilitates the creation and management of shader code that integrates with the WebGPU pipeline. * * To leverage the full capabilities of this class, familiarity with WGSL shaders is recommended.
* @see https://gpuweb.github.io/gpuweb/#index * @example * * // Create a new program * const program = new GpuProgram({ * vertex: { * source: '...', * entryPoint: 'main', * }, * fragment:{ * source: '...', * entryPoint: 'main', * }, * }); * * * Note: Both fragment and vertex shader sources can coexist within a single WGSL source file; * this can make things a bit simpler. * * For optimal usage and best performance, it helps to reuse programs whenever possible. * The {@link GpuProgram.from} helper function is designed for this purpose, utilizing an * internal cache to efficiently manage and retrieve program instances. * By leveraging this function, you can significantly reduce overhead and enhance the performance of your rendering pipeline. * * An important distinction between WebGL and WebGPU regarding program data retrieval: * While WebGL allows extraction of program information directly from its compiled state, * WebGPU does not offer such a capability. Therefore, in the context of WebGPU, we're required * to manually extract the program layout information from the source code itself. * @category rendering * @advanced */ export declare class GpuProgram { /** The fragment wgsl shader source. */ readonly fragment?: ProgramSource; /** The vertex wgsl shader source */ readonly vertex?: ProgramSource; /** * Mapping of uniform names to group indexes for organizing shader program uniforms. * Automatically generated from shader sources if not provided. * @example * // Assuming a shader with two uniforms, `u_time` and `u_resolution`, grouped respectively: * [ * { "u_time": 0 }, * { "u_resolution": 1 } * ] */ readonly layout: ProgramLayout; /** * Configuration for the WebGPU bind group layouts, detailing resource organization for the shader. * Generated from shader sources if not explicitly provided.
* @example * // Assuming a shader program that requires two bind groups: * [ * // First bind group layout entries * [{ binding: 0, visibility: GPUShaderStage.VERTEX, type: "uniform-buffer" }], * // Second bind group layout entries * [{ binding: 1, visibility: GPUShaderStage.FRAGMENT, type: "sampler" }, * { binding: 2, visibility: GPUShaderStage.FRAGMENT, type: "sampled-texture" }] * ] */ readonly gpuLayout: ProgramPipelineLayoutDescription; /** @internal */ _layoutKey: number; /** @internal */ _attributeLocationsKey: number; /** the structs and groups extracted from the shader sources */ readonly structsAndGroups: StructsAndGroups; /** * the name of the program, this is added to the label of the GPU Program created under the hood. * Makes it much easier to debug! */ readonly name: string; private _attributeData; /** if true, the program will automatically assign global uniforms to group[0] */ autoAssignGlobalUniforms: boolean; /** if true, the program will automatically assign local uniforms to group[1] */ autoAssignLocalUniforms: boolean; /** * Create a new GpuProgram * @param options - The options for the gpu program */ constructor(options: GpuProgramOptions); private _generateProgramKey; get attributeData(): Record; /** destroys the program */ destroy(): void; /** * Helper function that creates a program for a given source. * It will check the program cache if the program has already been created. * If it has that one will be returned, if not a new one will be created and cached. * @param options - The options for the program. * @returns A program using the same source */ static from(options: GpuProgramOptions): GpuProgram; } /** * A record of {@link BindGroup}'s used by the shader. * * `Record` * @category rendering * @advanced */ export type ShaderGroups = Record; interface ShaderBase { /** The WebGL program used by the WebGL renderer. */ glProgram?: GlProgram; /** The WebGPU program used by the WebGPU renderer. 
*/ gpuProgram?: GpuProgram; /** * A number that uses two bits on whether the shader is compatible with the WebGL renderer and/or the WebGPU renderer. * 0b00 - not compatible with either * 0b01 - compatible with WebGL * 0b10 - compatible with WebGPU * This is automatically set based on if a {@link GlProgram} or {@link GpuProgram} is provided. */ compatibleRenderers?: number; } /** * A base interface for shaders that includes the common properties. * @category rendering * @advanced */ export interface GlShaderWith extends ShaderBase { /** The WebGL program used by the WebGL renderer. */ glProgram: GlProgram; } /** * A base interface for shaders that includes the common properties. * @category rendering * @advanced */ export interface GpuShaderWith extends ShaderBase { /** The WebGPU program used by the WebGPU renderer. */ gpuProgram: GpuProgram; } /** * A descriptor for a shader with groups. * This is used to define a shader that uses {@link BindGroup}'s. * @category rendering * @advanced */ export interface ShaderWithGroupsDescriptor { /** A record of {@link BindGroup}'s used by the shader. */ groups: ShaderGroups; /** an optional map of how to bind the groups. This is automatically generated by reading the WebGPU program */ groupMap?: Record>; } interface ShaderWithResourcesDescriptor { /** * A key value of uniform resources used by the shader. * Under the hood pixi will look at the provided shaders and figure out where * the resources are mapped. Its up to you to make sure the resource key * matches the uniform name in the webGPU program. WebGL is a little more forgiving! */ resources?: Record; } /** * A descriptor for a shader * @category rendering * @advanced */ export type ShaderWith = GlShaderWith | GpuShaderWith; /** * A descriptor for a shader with groups. * @category rendering * @advanced */ export type ShaderWithGroups = ShaderWithGroupsDescriptor & ShaderWith; /** * A descriptor for a shader with resources. This is an easier way to work with uniforms. 
* especially when you are not working with bind groups * @category rendering * @advanced */ export type ShaderWithResources = ShaderWithResourcesDescriptor & ShaderWith; /** * A shader that can be used with both WebGL and WebGPU. * @category rendering * @advanced */ export interface IShaderWithResources extends ShaderWithResourcesDescriptor, ShaderBase { } /** * A descriptor for a shader that can be used with both WebGL and WebGPU. * @category rendering * @advanced */ export type ShaderDescriptor = ShaderWithGroups & ShaderWithResources; type GlShaderFromWith = { gpu?: GpuProgramOptions; gl: GlProgramOptions; }; type GpuShaderFromWith = { gpu: GpuProgramOptions; gl?: GlProgramOptions; }; /** * A descriptor for a shader that can be used with both WebGL and WebGPU. * @category rendering * @advanced */ export type ShaderFromGroups = (GlShaderFromWith | GpuShaderFromWith) & Omit; /** * A descriptor for a shader that can be used with both WebGL and WebGPU. * @category rendering * @advanced */ export type ShaderFromResources = (GlShaderFromWith | GpuShaderFromWith) & Omit; /** * The Shader class is an integral part of the PixiJS graphics pipeline. * Central to rendering in PixiJS are two key elements: A [shader] and a [geometry]. * The shader incorporates a {@link GlProgram} for WebGL or a {@link GpuProgram} for WebGPU, * instructing the respective technology on how to render the geometry. * * The primary goal of the Shader class is to offer a unified interface compatible with both WebGL and WebGPU. * When constructing a shader, you need to provide both a WebGL program and a WebGPU program due to the distinctions * between the two rendering engines. If only one is provided, the shader won't function with the omitted renderer. * * Both WebGL and WebGPU utilize the same resource object when passed into the shader. * Post-creation, the shader's interface remains consistent across both WebGL and WebGPU. 
* The sole distinction lies in whether a glProgram or a gpuProgram is employed. * * Modifying shader uniforms, which can encompass: * - TextureSampler {@link TextureStyle} * - TextureSource {@link TextureSource} * - UniformsGroups {@link UniformGroup} * @example * * const shader = new Shader({ * glProgram: glProgram, * gpuProgram: gpuProgram, * resources: { * uTexture: texture.source, * uSampler: texture.sampler, * uColor: [1, 0, 0, 1], * }, * }); * * // update the uniforms * shader.resources.uColor[1] = 1; * shader.resources.uTexture = texture2.source; * @class * @category rendering * @advanced */ export declare class Shader extends EventEmitter<{ "destroy": Shader; }> { /** A unique identifier for the shader */ readonly uid: number; /** An instance of the GPU program used by the WebGPU renderer */ gpuProgram: GpuProgram; /** An instance of the GL program used by the WebGL renderer */ glProgram: GlProgram; /** * A number that uses two bits on whether the shader is compatible with the WebGL renderer and/or the WebGPU renderer. * 0b00 - not compatible with either * 0b01 - compatible with WebGL * 0b10 - compatible with WebGPU * This is automatically set based on if a {@link GlProgram} or {@link GpuProgram} is provided. */ readonly compatibleRenderers: number; /** */ groups: Record; /** A record of the resources used by the shader. */ resources: Record; /** * A record of the uniform groups and resources used by the shader. * This is used by WebGL renderer to sync uniform data. * @internal */ _uniformBindMap: Record>; private readonly _ownedBindGroups; /** * Fired after rendering finishes. * @event Shader#destroy */ /** * There are two ways to create a shader. * one is to pass in resources which is a record of uniform groups and resources. * another is to pass in groups which is a record of {@link BindGroup}s. * this second method is really to make use of shared {@link BindGroup}s. * For most cases you will want to use resources as they are easier to work with. 
* Use groups if you want to share {@link BindGroup}s between shaders. * you cannot mix and match - either use resources or groups. * @param options - The options for the shader */ constructor(options: ShaderWithResources); constructor(options: ShaderWithGroups); /** * Sometimes a resource group will be provided later (for example global uniforms) * In such cases, this method can be used to let the shader know about the group. * @param name - the name of the resource group * @param groupIndex - the index of the group (should match the webGPU shader group location) * @param bindIndex - the index of the bind point (should match the webGPU shader bind point) */ addResource(name: string, groupIndex: number, bindIndex: number): void; private _buildResourceAccessor; /** * Use to destroy the shader when it's no longer needed. * It will destroy the resources and remove listeners. * @param destroyPrograms - if the programs should be destroyed as well. * Make sure it's not being used by other shaders! */ destroy(destroyPrograms?: boolean): void; /** * A short hand function to create a shader based on a vertex and fragment shader. * @param options * @returns A shiny new PixiJS shader! */ static from(options: ShaderFromGroups): Shader; static from(options: ShaderFromResources): Shader; } /** * The action types for a batch. * @category rendering * @advanced */ export type BatchAction = "startBatch" | "renderBatch"; /** * A batch pool is used to store batches when they are not currently in use. * @category rendering * @advanced */ export declare class Batch implements Instruction { renderPipeId: string; action: BatchAction; start: number; size: number; textures: BatchTextureArray; blendMode: BLEND_MODES; topology: Topology; canBundle: boolean; /** * breaking rules slightly here in the name of performance.. * storing references to these bindgroups here is just faster for access! * keeps a reference to the GPU bind group to set when rendering this batch for WebGPU.
Will be null if using WebGL. */ gpuBindGroup: GPUBindGroup; /** * breaking rules slightly here in the name of performance.. * storing references to these bindgroups here is just faster for access! * keeps a reference to the bind group to set when rendering this batch for WebGPU. Will be null if using WebGL. */ bindGroup: BindGroup; batcher: Batcher; destroy(): void; } /** * Represents an element that can be batched for rendering. * @interface * @category rendering * @advanced */ export interface BatchableElement { /** * The name of the batcher to use. Must be registered. * @type {string} */ batcherName: string; /** * The texture to be used for rendering. * @type {Texture} */ texture: Texture; /** * The blend mode to be applied. * @type {BLEND_MODES} */ blendMode: BLEND_MODES; /** * The size of the index data. * @type {number} */ indexSize: number; /** * The size of the attribute data. * @type {number} */ attributeSize: number; /** * The topology to be used for rendering. * @type {Topology} */ topology: Topology; /** * Whether the element should be packed as a quad for better performance. * @type {boolean} */ packAsQuad: boolean; /** * The texture ID, stored for efficient updating. * @type {number} * @private */ _textureId: number; /** * The starting position in the attribute buffer. * @type {number} * @private */ _attributeStart: number; /** * The starting position in the index buffer. * @type {number} * @private */ _indexStart: number; /** * Reference to the batcher. * @type {Batcher} * @private */ _batcher: Batcher; /** * Reference to the batch. * @type {Batch} * @private */ _batch: Batch; } /** * Represents a batchable quad element. * @extends BatchableElement * @category rendering * @advanced */ export interface BatchableQuadElement extends BatchableElement { /** * Indicates that this element should be packed as a quad. * @type {true} */ packAsQuad: true; /** * The size of the attribute data for this quad element.
* @type {4} */ attributeSize: 4; /** * The size of the index data for this quad element. * @type {6} */ indexSize: 6; /** * The bounds data for this quad element. * @type {BoundsData} */ bounds: BoundsData; } /** * Represents a batchable mesh element. * @extends BatchableElement * @category rendering * @advanced */ export interface BatchableMeshElement extends BatchableElement { /** * The UV coordinates of the mesh. * @type {number[] | Float32Array} */ uvs: number[] | Float32Array; /** * The vertex positions of the mesh. * @type {number[] | Float32Array} */ positions: number[] | Float32Array; /** * The indices of the mesh. * @type {number[] | Uint16Array | Uint32Array} */ indices: number[] | Uint16Array | Uint32Array; /** * The offset in the index buffer. * @type {number} */ indexOffset: number; /** * The offset in the attribute buffer. * @type {number} */ attributeOffset: number; /** * Indicates that this element should not be packed as a quad. * @type {false} */ packAsQuad: false; } /** * The options for the batcher. * @category rendering * @advanced */ export interface BatcherOptions { /** The maximum number of textures per batch. */ maxTextures: number; /** The initial size of the attribute buffer. */ attributesInitialSize?: number; /** The initial size of the index buffer. */ indicesInitialSize?: number; } /** * A batcher is used to batch together objects with the same texture. * It is an abstract class that must be extended. see DefaultBatcher for an example. * @category rendering * @advanced */ export declare abstract class Batcher { static defaultOptions: Partial; /** unique id for this batcher */ readonly uid: number; /** The buffer containing attribute data for all elements in the batch. */ attributeBuffer: ViewableBuffer; /** The buffer containing index data for all elements in the batch. */ indexBuffer: IndexBufferArray; /** The current size of the attribute data in the batch. 
*/ attributeSize: number; /** The current size of the index data in the batch. */ indexSize: number; /** The total number of elements currently in the batch. */ elementSize: number; /** The starting index of elements in the current batch. */ elementStart: number; /** Indicates whether the batch data has been modified and needs updating. */ dirty: boolean; /** The current index of the batch being processed. */ batchIndex: number; /** An array of all batches created during the current rendering process. */ batches: Batch[]; private _elements; private _batchIndexStart; private _batchIndexSize; /** The maximum number of textures per batch. */ readonly maxTextures: number; /** The name of the batcher. Must be implemented by subclasses. */ abstract name: string; /** The vertex size of the batcher. Must be implemented by subclasses. */ protected abstract vertexSize: number; /** The geometry used by this batcher. Must be implemented by subclasses. */ abstract geometry: Geometry; /** * The shader used by this batcher. Must be implemented by subclasses. * this can be shared by multiple batchers of the same type. */ abstract shader: Shader; /** * Packs the attributes of a BatchableMeshElement into the provided views. * Must be implemented by subclasses. * @param element - The BatchableMeshElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ abstract packAttributes(element: BatchableMeshElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; /** * Packs the attributes of a BatchableQuadElement into the provided views. * Must be implemented by subclasses. * @param element - The BatchableQuadElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. 
* @param index - The starting index in the views. * @param textureId - The texture ID to use. */ abstract packQuadAttributes(element: BatchableQuadElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; constructor(options: BatcherOptions); begin(): void; add(batchableObject: BatchableElement): void; checkAndUpdateTexture(batchableObject: BatchableElement, texture: Texture): boolean; updateElement(batchableObject: BatchableElement): void; /** * breaks the batcher. This happens when a batch gets too big, * or we need to switch to a different type of rendering (a filter for example) * @param instructionSet */ break(instructionSet: InstructionSet): void; private _finishBatch; finish(instructionSet: InstructionSet): void; /** * Resizes the attribute buffer to the given size (1 = 1 float32) * @param size - the size in vertices to ensure (not bytes!) */ ensureAttributeBuffer(size: number): void; /** * Resizes the index buffer to the given size (1 = 1 float32) * @param size - the size in vertices to ensure (not bytes!) */ ensureIndexBuffer(size: number): void; private _resizeAttributeBuffer; private _resizeIndexBuffer; packQuadIndex(indexBuffer: IndexBufferArray, index: number, indicesOffset: number): void; packIndex(element: BatchableMeshElement, indexBuffer: IndexBufferArray, index: number, indicesOffset: number): void; destroy(): void; } /** * An interface for a pipe that can be used to build instructions for the renderer. * InstructionPipes are specifically used to manage the state of the renderer. * For example, the BlendModePipe is used to set the blend mode of the renderer. * @category rendering * @advanced */ export interface InstructionPipe { /** * called just before we execute the draw calls , this is where the pipes have an opportunity to * upload data to the GPU. This is only called if data changes. 
* @param instructionSet - the instruction set currently being built */ upload?: (instructionSet: InstructionSet) => void; /** * this is where the actual instruction is executed - eg make the draw call * activate a filter. Any instructions that have the same renderPipeId have their * execute method called * @param instruction - the instruction to execute */ execute?: (instruction: INSTRUCTION) => void; buildReset?: (instructionSet: InstructionSet) => void; buildStart?: (instructionSet: InstructionSet) => void; buildEnd?: (instructionSet: InstructionSet) => void; /** Called just after the render ends giving the RenderPipes a chance to do any cleanup */ renderEnd?: () => void; /** Called just before the render starts giving the RenderPipes a chance to do any setup */ renderStart?: () => void; /** * Used by the effect pipes push and pop effects to the renderer. A push effect allows * the renderer to change its state to support the effect. A pop effect allows the renderer * to return to its previous state. An example of this would be the filter effect. * @param effect - the effect to push * @param targetContainer - the container that the effect is being applied to * @param instructionSet - the instruction set currently being built */ push?: (effect: Effect, targetContainer: Container, instructionSet: InstructionSet) => void; /** * Used by effect pipes to pop effects from the renderer. * @param effect - the effect to pop * @param targetContainer - the container that the effect is being applied to * @param instructionSet - the instruction set currently being built */ pop?: (effect: Effect, targetContainer: Container, instructionSet: InstructionSet) => void; } /** * An interface for a pipe that can be used to build instructions for the renderer. * RenderPipes are specifically used to render Renderables like a Mesh. * @category rendering * @advanced */ export interface RenderPipe { /** * This is where the renderable is added to the instruction set. 
This is called once per renderable. * For instance, a MeshRenderPipe could be used to enqueue a 'draw mesh' command * to the rendering instruction set, catering to the rendering of mesh geometry. * In more complex scenarios, such as the SpritePipe, this seamlessly coordinates * with a batchPipe to efficiently batch and add batch instructions to the instructions set * * Add is called when the instructions set is being built. * @param renderable - the renderable that needs to be rendered * @param instructionSet - the instruction set currently being built */ addRenderable: (renderable: RENDERABLE, instructionSet: InstructionSet) => void; /** * Called whenever a renderable has been updated, eg its position has changed. * This is only called in the render loop if the instructions set is being reused * from the last frame. Otherwise addRenderable is called. * @param renderable - the renderable that needs to be rendered */ updateRenderable: (renderable: RENDERABLE) => void; /** * Called whenever a renderable is destroyed, often the pipes keep a webGL / webGPU specific representation * of the renderable that needs to be tidied up when the renderable is destroyed. * @param renderable - the renderable that needs to be rendered */ destroyRenderable?: (renderable: RENDERABLE) => void; /** * This function is called when the renderer is determining if it can use the same instruction set again to * improve performance. If this function returns true, the renderer will rebuild the whole instruction set * for the scene. This is only called if the scene has not changed its structure. * @param renderable * @returns {boolean} */ validateRenderable: (renderable: RENDERABLE) => boolean; } /** * An interface for a pipe that can be used to build instructions for the renderer. * BatchPipes are specifically used to build and render Batches. * @category rendering * @advanced */ export interface BatchPipe { /** * Add a batchable object to the batch.
* @param renderable - a batchable object that can be added to the batch * @param instructionSet - the instruction set currently being built */ addToBatch: (renderable: BatchableElement, instructionSet: InstructionSet) => void; /** * Forces the batch to break. This can happen if for example you need to render everything and then * change the render target. * @param instructionSet - the instruction set currently being built */ break: (instructionSet: InstructionSet) => void; } /** * A helpful type that can be used to create a new RenderPipe, BatchPipe or InstructionPipe * @category rendering * @advanced */ export interface PipeConstructor { new (renderer: Renderer, adaptor?: any): RenderPipe | BatchPipe | InstructionPipe; } /** * Options for creating a render target. * @category rendering * @advanced */ export interface RenderTargetOptions { /** the width of the RenderTarget */ width?: number; /** the height of the RenderTarget */ height?: number; /** the resolution of the RenderTarget */ resolution?: number; /** an array of textures, or a number indicating how many color textures there should be */ colorTextures?: BindableTexture[] | number; /** should this render target have a stencil buffer? */ stencil?: boolean; /** should this render target have a depth buffer? */ depth?: boolean; /** a depth stencil texture that the depth and stencil outputs will be written to */ depthStencilTexture?: BindableTexture | boolean; /** should this render target be antialiased? */ antialias?: boolean; /** is this a root element, true if this is gl context owners render target */ isRoot?: boolean; } /** * A class that describes what the renderers are rendering to. * This can be as simple as a Texture, or as complex as a multi-texture, multi-sampled render target. * Support for stencil and depth buffers is also included. * * If you need something more complex than a Texture to render to, you should use this class. 
* Under the hood, all textures you render to have a RenderTarget created on their behalf. * @category rendering * @advanced */ export declare class RenderTarget { /** The default options for a render target */ static defaultOptions: RenderTargetOptions; /** unique id for this render target */ readonly uid: number; /** * An array of textures that can be written to by the GPU - mostly this has one texture in Pixi, but you could * write to multiple if required! (eg deferred lighting) */ colorTextures: TextureSource[]; /** the stencil and depth buffer will write to this texture in WebGPU */ depthStencilTexture: TextureSource; /** if true, will ensure a stencil buffer is added. For WebGPU, this will automatically create a depthStencilTexture */ stencil: boolean; /** if true, will ensure a depth buffer is added. For WebGPU, this will automatically create a depthStencilTexture */ depth: boolean; dirtyId: number; isRoot: boolean; private readonly _size; /** if true, then when the render target is destroyed, it will destroy all the textures that were created for it. */ private readonly _managedColorTextures; /** * @param [descriptor] - Options for creating a render target. */ constructor(descriptor?: RenderTargetOptions); get size(): [ number, number ]; get width(): number; get height(): number; get pixelWidth(): number; get pixelHeight(): number; get resolution(): number; get colorTexture(): TextureSource; protected onSourceResize(source: TextureSource): void; /** * This will ensure a depthStencil texture is created for this render target. * Most likely called by the mask system to make sure we have stencil buffer added. * @internal */ ensureDepthStencilTexture(): void; resize(width: number, height: number, resolution?: number, skipColorTexture?: boolean): void; destroy(): void; } /** * Represents a render target.
* @category rendering * @ignore */ export declare class GlRenderTarget { width: number; height: number; msaa: boolean; framebuffer: WebGLFramebuffer; resolveTargetFramebuffer: WebGLFramebuffer; msaaRenderBuffer: WebGLRenderbuffer[]; depthStencilRenderBuffer: WebGLRenderbuffer; } /** * A class which holds the canvas contexts and textures for a render target. * @category rendering * @ignore */ export declare class GpuRenderTarget { contexts: GPUCanvasContext[]; msaaTextures: TextureSource[]; msaa: boolean; msaaSamples: number; width: number; height: number; descriptor: GPURenderPassDescriptor; } /** * A render surface is a texture, canvas, or render target * @category rendering * @see environment.ICanvas * @see Texture * @see RenderTarget * @advanced */ export type RenderSurface = ICanvas | BindableTexture | RenderTarget; /** * An adaptor interface for RenderTargetSystem to support WebGL and WebGPU. * This is used internally by the renderer, and is not intended to be used directly. * @ignore */ export interface RenderTargetAdaptor { init( /** the renderer */ renderer: Renderer, /** the render target system */ renderTargetSystem: RenderTargetSystem): void; /** A function copies the contents of a render surface to a texture */ copyToTexture( /** the render surface to copy from */ sourceRenderSurfaceTexture: RenderTarget, /** the texture to copy to */ destinationTexture: Texture, /** the origin of the copy */ originSrc: { x: number; y: number; }, /** the size of the copy */ size: { width: number; height: number; }, /** the destination origin (top left to paste from!) 
*/ originDest?: { x: number; y: number; }): Texture; /** starts a render pass on the render target */ startRenderPass( /** the render target to start the render pass on */ renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, /** the color to clear to */ clearColor?: RgbaArray, /** the viewport to use */ viewport?: Rectangle): void; /** clears the current render target to the specified color */ clear( /** the render target to clear */ renderTarget: RenderTarget, /** the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 */ clear: CLEAR_OR_BOOL, /** the color to clear to */ clearColor?: RgbaArray, /** the viewport to use */ viewport?: Rectangle): void; /** finishes the current render pass */ finishRenderPass(renderTarget: RenderTarget): void; /** called after the render pass is finished */ postrender?(renderTarget: RenderTarget): void; /** called before the render main pass is started */ prerender?(renderTarget: RenderTarget): void; /** * initializes a gpu render target. Both renderers use this function to initialize a gpu render target * Its different type of object depending on the renderer. */ initGpuRenderTarget( /** the render target to initialize */ renderTarget: RenderTarget): RENDER_TARGET; /** called when a render target is resized */ resizeGpuRenderTarget( /** the render target to resize */ renderTarget: RenderTarget): void; /** destroys the gpu render target */ destroyGpuRenderTarget( /** the render target to destroy */ gpuRenderTarget: RENDER_TARGET): void; } /** * A system that manages render targets. A render target is essentially a place where the shaders can color in the pixels. * The render target system is responsible for binding the render target to the renderer, and managing the viewport. * Render targets can be pushed and popped. * * To make it easier, you can also bind textures and canvases too. This will automatically create a render target for you. 
* The render target itself is a lot more powerful than just a texture or canvas, * as it can have multiple textures attached to it. * It will also give ou fine grain control over the stencil buffer / depth texture. * @example * * ```js * * // create a render target * const renderTarget = new RenderTarget({ * colorTextures: [new TextureSource({ width: 100, height: 100 })], * }); * * // bind the render target * renderer.renderTarget.bind(renderTarget); * * // draw something! * ``` * @category rendering * @advanced */ export declare class RenderTargetSystem implements System { /** When rendering of a scene begins, this is where the root render surface is stored */ rootRenderTarget: RenderTarget; /** This is the root viewport for the render pass*/ rootViewPort: Rectangle; /** A boolean that lets the dev know if the current render pass is rendering to the screen. Used by some plugins */ renderingToScreen: boolean; /** the current active render target */ renderTarget: RenderTarget; /** the current active render surface that the render target is created from */ renderSurface: RenderSurface; /** the current viewport that the gpu is using */ readonly viewport: Rectangle; /** * a runner that lets systems know if the active render target has changed. * Eg the Stencil System needs to know so it can manage the stencil buffer */ readonly onRenderTargetChange: SystemRunner; /** the projection matrix that is used by the shaders based on the active render target and the viewport */ readonly projectionMatrix: Matrix; /** the default clear color for render targets */ readonly defaultClearColor: RgbaArray; /** a reference to the adaptor that interfaces with WebGL / WebGP */ readonly adaptor: RenderTargetAdaptor; /** * a hash that stores the render target for a given render surface. When you pass in a texture source, * a render target is created for it. 
This map stores and makes it easy to retrieve the render target */ private readonly _renderSurfaceToRenderTargetHash; /** A hash that stores a gpu render target for a given render target. */ private _gpuRenderTargetHash; /** * A stack that stores the render target and frame that is currently being rendered to. * When push is called, the current render target is stored in this stack. * When pop is called, the previous render target is restored. */ private readonly _renderTargetStack; /** A reference to the renderer */ private readonly _renderer; constructor(renderer: Renderer); /** called when dev wants to finish a render pass */ finishRenderPass(): void; /** * called when the renderer starts to render a scene. * @param options * @param options.target - the render target to render to * @param options.clear - the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param options.clearColor - the color to clear to * @param options.frame - the frame to render to */ renderStart({ target, clear, clearColor, frame }: { target: RenderSurface; clear: CLEAR_OR_BOOL; clearColor: RgbaArray; frame?: Rectangle; }): void; postrender(): void; /** * Binding a render surface! This is the main function of the render target system. * It will take the RenderSurface (which can be a texture, canvas, or render target) and bind it to the renderer. * Once bound all draw calls will be rendered to the render surface. * * If a frame is not provide and the render surface is a texture, the frame of the texture will be used. * @param renderSurface - the render surface to bind * @param clear - the clear mode to use. 
Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param clearColor - the color to clear to * @param frame - the frame to render to * @returns the render target that was bound */ bind(renderSurface: RenderSurface, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, frame?: Rectangle): RenderTarget; clear(target?: RenderSurface, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray): void; protected contextChange(): void; /** * Push a render surface to the renderer. This will bind the render surface to the renderer, * @param renderSurface - the render surface to push * @param clear - the clear mode to use. Can be true or a CLEAR number 'COLOR | DEPTH | STENCIL' 0b111 * @param clearColor - the color to clear to * @param frame - the frame to use when rendering to the render surface */ push(renderSurface: RenderSurface, clear?: CLEAR | boolean, clearColor?: RgbaArray, frame?: Rectangle): RenderTarget; /** Pops the current render target from the renderer and restores the previous render target. */ pop(): void; /** * Gets the render target from the provide render surface. Eg if its a texture, * it will return the render target for the texture. * If its a render target, it will return the same render target. * @param renderSurface - the render surface to get the render target for * @returns the render target for the render surface */ getRenderTarget(renderSurface: RenderSurface): RenderTarget; /** * Copies a render surface to another texture. * * NOTE: * for sourceRenderSurfaceTexture, The render target must be something that is written too by the renderer * * The following is not valid: * @example * const canvas = document.createElement('canvas') * canvas.width = 200; * canvas.height = 200; * * const ctx = canvas2.getContext('2d')! 
* ctx.fillStyle = 'red' * ctx.fillRect(0, 0, 200, 200); * * const texture = RenderTexture.create({ * width: 200, * height: 200, * }) * const renderTarget = renderer.renderTarget.getRenderTarget(canvas2); * * renderer.renderTarget.copyToTexture(renderTarget,texture, {x:0,y:0},{width:200,height:200},{x:0,y:0}); * * The best way to copy a canvas is to create a texture from it. Then render with that. * * Parsing in a RenderTarget canvas context (with a 2d context) * @param sourceRenderSurfaceTexture - the render surface to copy from * @param destinationTexture - the texture to copy to * @param originSrc - the origin of the copy * @param originSrc.x - the x origin of the copy * @param originSrc.y - the y origin of the copy * @param size - the size of the copy * @param size.width - the width of the copy * @param size.height - the height of the copy * @param originDest - the destination origin (top left to paste from!) * @param originDest.x - the x origin of the paste * @param originDest.y - the y origin of the paste */ copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture>; /** * ensures that we have a depth stencil buffer available to render to * This is used by the mask system to make sure we have a stencil buffer. */ ensureDepthStencil(): void; /** nukes the render target system */ destroy(): void; private _initRenderTarget; getGpuRenderTarget(renderTarget: RenderTarget): RENDER_TARGET; resetState(): void; } /** * Options passed to the ViewSystem * @category rendering * @advanced */ export interface ViewSystemOptions { /** * The width of the screen. * @default 800 */ width?: number; /** * The height of the screen. * @default 600 */ height?: number; /** The canvas to use as a view, optional. */ canvas?: ICanvas; /** * Alias for `canvas`. 
* @deprecated since 8.0.0 */ view?: ICanvas; /** * Resizes renderer view in CSS pixels to allow for resolutions other than 1. * * This is only supported for HTMLCanvasElement * and will be ignored if the canvas is an OffscreenCanvas. */ autoDensity?: boolean; /** The resolution / device pixel ratio of the renderer. */ resolution?: number; /** Whether to enable anti-aliasing. This may affect performance. */ antialias?: boolean; /** Whether to ensure the main view has can make use of the depth buffer. Always true for WebGL renderer. */ depth?: boolean; } /** * Options for destroying the ViewSystem. * @category rendering * @advanced */ export interface ViewSystemDestroyOptions { /** Whether to remove the view element from the DOM. Defaults to `false`. */ removeView?: boolean; } /** * The view system manages the main canvas that is attached to the DOM. * This main role is to deal with how the holding the view reference and dealing with how it is resized. * @category rendering * @advanced */ export declare class ViewSystem implements System> { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "view"; readonly priority: 0; }; /** The default options for the view system. */ static defaultOptions: ViewSystemOptions; /** The canvas element that everything is drawn to. */ canvas: ICanvas; /** The texture that is used to draw the canvas to the screen. */ texture: Texture; /** * Whether CSS dimensions of canvas view should be resized to screen dimensions automatically. * This is only supported for HTMLCanvasElement and will be ignored if the canvas is an OffscreenCanvas. * @type {boolean} */ get autoDensity(): boolean; set autoDensity(value: boolean); /** Whether to enable anti-aliasing. This may affect performance. */ antialias: boolean; /** * Measurements of the screen. (0, 0, screenWidth, screenHeight). 
* * Its safe to use as filterArea or hitArea for the whole stage. */ screen: Rectangle; /** The render target that the view is drawn to. */ renderTarget: RenderTarget; /** The resolution / device pixel ratio of the renderer. */ get resolution(): number; set resolution(value: number); /** * initiates the view system * @param options - the options for the view */ init(options: ViewSystemOptions): void; /** * Resizes the screen and canvas to the specified dimensions. * @param desiredScreenWidth - The new width of the screen. * @param desiredScreenHeight - The new height of the screen. * @param resolution */ resize(desiredScreenWidth: number, desiredScreenHeight: number, resolution: number): void; /** * Destroys this System and optionally removes the canvas from the dom. * @param {options | false} options - The options for destroying the view, or "false". * @example * viewSystem.destroy(); * viewSystem.destroy(true); * viewSystem.destroy({ removeView: true }); */ destroy(options?: TypeOrBool): void; } /** * A function that takes a renderer and does the custom rendering logic. * This is the function that will be called each frame. * @param renderer - The current renderer * @example * ```js * import { RenderContainer } from 'pixi.js'; * * // create a new render container * const renderContainer = new RenderContainer((renderer) => { * // custom render logic here * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * }); * ``` * @category scene * @advanced */ export type RenderFunction = (renderer: Renderer) => void; /** * Options for the {@link RenderContainer} constructor. 
* @category scene * @advanced * @noInheritDoc */ export interface RenderContainerOptions extends ContainerOptions { /** the optional custom render function if you want to inject the function via the constructor */ render?: RenderFunction; /** how to know if the custom render logic contains a point or not, used for interaction */ containsPoint?: (point: Point) => boolean; /** how to add the bounds of this object when measuring */ addBounds?: (bounds: BoundsData) => void; } /** * A container that allows for custom rendering logic. Its essentially calls the render function each frame * and allows for custom rendering logic - the render could be a WebGL renderer or WebGPU render or even a canvas render. * Its up to you to define the logic. * * This can be used in two ways, either by extending the class and overriding the render method, * or by passing a custom render function * @example * ```js * import { RenderContainer } from 'pixi.js'; * * // extend the class * class MyRenderContainer extends RenderContainer * { * render(renderer) * { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * } * } * * // override the render method * const renderContainer = new RenderContainer( * (renderer) => { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * }) * ``` * @category scene * @advanced */ export declare class RenderContainer extends ViewContainer implements Instruction { /** @internal */ readonly renderPipeId: string; /** @internal */ batched: boolean; /** * Adds the bounds of this text to the bounds object. * @param bounds - The output bounds object. */ addBounds: (bounds: Bounds) => void; /** * @param options - The options for the container. */ constructor(options: RenderContainerOptions | RenderFunction); /** @private */ protected updateBounds(): void; /** * An overridable function that can be used to render the object using the current renderer. 
* @param _renderer - The current renderer */ render(_renderer: Renderer): void; } /** * The CustomRenderPipe is a render pipe that allows for custom rendering logic for your renderable objects. * @example * import { RenderContainer } from 'pixi.js'; * * const renderContainer = new RenderContainer( * (renderer) => { * renderer.clear({ * clearColor: 'green', // clear the screen to green when rendering this item * }); * }) * @category rendering * @internal */ export declare class CustomRenderPipe implements InstructionPipe, RenderPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "customRender"; }; private _renderer; constructor(renderer: Renderer); updateRenderable(): void; destroyRenderable(): void; validateRenderable(): boolean; addRenderable(container: RenderContainer, instructionSet: InstructionSet): void; execute(container: RenderContainer): void; destroy(): void; } /** * This class represents a geometry used for batching in the rendering system. * It defines the structure of vertex attributes and index buffers for batched rendering. * @category rendering * @advanced */ export declare class BatchGeometry extends Geometry { constructor(); } /** * DefaultShader is a specialized shader class designed for batch rendering. * It extends the base Shader class and provides functionality for handling * color, texture batching, and pixel rounding in both WebGL and WebGPU contexts. * * It is used by the default batcher * @extends Shader * @category rendering * @advanced */ export declare class DefaultShader extends Shader { constructor(maxTextures: number); } /** * Represents the common elements for default batch rendering. * This interface defines the properties that are used by the DefaultBatcher * to render elements efficiently in a batch. 
* @category rendering * @advanced */ export interface DefaultBatchElements { /** * The color of the element that will be multiplied with the texture color. * This is typically represented as a 32-bit integer in RGBA format. */ color: number; /** * Determines whether the element should be rounded to the nearest pixel. * - 0: No rounding (default) * - 1: Round to nearest pixel * This can help with visual consistency, especially for pixel art styles. */ roundPixels: 0 | 1; /** * The transform matrix of the element. * This matrix represents the position, scale, rotation, and skew of the element. */ transform: Matrix; } /** * Represents a batchable quad element with default batch properties. * @category rendering * @advanced */ export interface DefaultBatchableQuadElement extends BatchableQuadElement, DefaultBatchElements { } /** * Represents a batchable mesh element with default batch properties. * @category rendering * @advanced */ export interface DefaultBatchableMeshElement extends BatchableMeshElement, DefaultBatchElements { } /** * The default batcher is used to batch quads and meshes. This batcher will batch the following elements: * - tints * - roundPixels * - texture * - transform * @category rendering * @advanced */ export declare class DefaultBatcher extends Batcher { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.Batcher ]; readonly name: "default"; }; geometry: BatchGeometry; shader: DefaultShader; name: "default"; /** The size of one attribute. 1 = 32 bit. x, y, u, v, color, textureIdAndRound -> total = 6 */ vertexSize: number; constructor(options: BatcherOptions); /** * Packs the attributes of a DefaultBatchableMeshElement into the provided views. * @param element - The DefaultBatchableMeshElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. 
*/ packAttributes(element: DefaultBatchableMeshElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; /** * Packs the attributes of a DefaultBatchableQuadElement into the provided views. * @param element - The DefaultBatchableQuadElement to pack. * @param float32View - The Float32Array view to pack into. * @param uint32View - The Uint32Array view to pack into. * @param index - The starting index in the views. * @param textureId - The texture ID to use. */ packQuadAttributes(element: DefaultBatchableQuadElement, float32View: Float32Array, uint32View: Uint32Array, index: number, textureId: number): void; } /** * A batchable sprite object. * @internal */ export declare class BatchableSprite implements DefaultBatchableQuadElement { batcherName: string; topology: Topology; readonly attributeSize = 4; readonly indexSize = 6; readonly packAsQuad = true; transform: Matrix; renderable: Container; texture: Texture; bounds: BoundsData; roundPixels: 0 | 1; _indexStart: number; _textureId: number; _attributeStart: number; _batcher: Batcher; _batch: Batch; get blendMode(): BLEND_MODES; get color(): number; reset(): void; destroy(): void; } /** * Options for caching a container as a texture. * @category rendering * @advanced */ export interface CacheAsTextureOptions { /** * If true, the texture will be antialiased. This smooths out the edges of the texture. * @default false */ antialias?: boolean; /** * The resolution of the texture. A higher resolution means a sharper texture but uses more memory. * By default the resolution is 1 which is the same as the rendererers resolution. 
*/ resolution?: number; /** * Scale Mode to use for the cached texture * @type {SCALE_MODE} * @default 'linear' * @example * ```ts * const container = new Container(); * container.cacheAsTexture({ scaleMode: 'nearest' }); * ``` * @see {@link SCALE_MODE} */ scaleMode?: SCALE_MODE; } /** * A RenderGroup is a class that is responsible for I generating a set of instructions that are used to render the * root container and its children. It also watches for any changes in that container or its children, * these changes are analysed and either the instruction set is rebuild or the instructions data is updated. * @category rendering * @advanced */ export declare class RenderGroup implements Instruction { renderPipeId: string; root: Container; canBundle: boolean; renderGroupParent: RenderGroup; renderGroupChildren: RenderGroup[]; worldTransform: Matrix; worldColorAlpha: number; worldColor: number; worldAlpha: number; readonly childrenToUpdate: Record; updateTick: number; gcTick: number; readonly childrenRenderablesToUpdate: { list: Container[]; index: number; }; structureDidChange: boolean; instructionSet: InstructionSet; private readonly _onRenderContainers; /** * Indicates if the cached texture needs to be updated. * @default true */ textureNeedsUpdate: boolean; /** * Indicates if the container should be cached as a texture. * @default false */ isCachedAsTexture: boolean; /** * The texture used for caching the container. this is only set if isCachedAsTexture is true. * It can only be accessed after a render pass. * @type {Texture | undefined} */ texture?: Texture; /** * The bounds of the cached texture. * @type {Bounds | undefined} * @ignore */ _textureBounds?: Bounds; /** * The options for caching the container as a texture. 
* @type {CacheAsTextureOptions} */ textureOptions: CacheAsTextureOptions; /** * holds a reference to the batchable render sprite * @ignore */ _batchableRenderGroup: BatchableSprite; /** * Holds a reference to the closest parent RenderGroup that has isCachedAsTexture enabled. * This is used to properly transform coordinates when rendering into cached textures. * @type {RenderGroup | null} * @ignore */ _parentCacheAsTextureRenderGroup: RenderGroup; private _inverseWorldTransform; private _textureOffsetInverseTransform; private _inverseParentTextureTransform; private _matrixDirty; init(root: Container): void; enableCacheAsTexture(options?: CacheAsTextureOptions): void; disableCacheAsTexture(): void; updateCacheTexture(): void; reset(): void; get localTransform(): Matrix; addRenderGroupChild(renderGroupChild: RenderGroup): void; private _removeRenderGroupChild; addChild(child: Container): void; removeChild(child: Container): void; removeChildren(children: Container[]): void; onChildUpdate(child: Container): void; updateRenderable(renderable: ViewContainer): void; onChildViewUpdate(child: Container): void; get isRenderable(): boolean; /** * adding a container to the onRender list will make sure the user function * passed in to the user defined 'onRender` callBack * @param container - the container to add to the onRender list */ addOnRender(container: Container): void; removeOnRender(container: Container): void; runOnRender(renderer: Renderer): void; destroy(): void; getChildren(out?: Container[]): Container[]; private _getChildren; invalidateMatrices(): void; /** * Returns the inverse of the world transform matrix. * @returns {Matrix} The inverse of the world transform matrix. */ get inverseWorldTransform(): Matrix; /** * Returns the inverse of the texture offset transform matrix. * @returns {Matrix} The inverse of the texture offset transform matrix. */ get textureOffsetInverseTransform(): Matrix; /** * Returns the inverse of the parent texture transform matrix. 
* This is used to properly transform coordinates when rendering into cached textures. * @returns {Matrix} The inverse of the parent texture transform matrix. */ get inverseParentTextureTransform(): Matrix; /** * Returns a matrix that transforms coordinates to the correct coordinate space of the texture being rendered to. * This is the texture offset inverse transform of the closest parent RenderGroup that is cached as a texture. * @returns {Matrix | null} The transform matrix for the cached texture coordinate space, * or null if no parent is cached as texture. */ get cacheToLocalTransform(): Matrix; } /** * The RenderGroupPipe is a render pipe for rendering RenderGroups. * @internal */ export declare class RenderGroupPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "renderGroup"; }; private _renderer; constructor(renderer: Renderer); addRenderGroup(renderGroup: RenderGroup, instructionSet: InstructionSet): void; execute(renderGroup: RenderGroup): void; destroy(): void; private _addRenderableDirect; private _addRenderableCacheAsTexture; private _executeCacheAsTexture; private _executeDirect; } /** * The view system manages the main canvas that is attached to the DOM. * This main role is to deal with how the holding the view reference and dealing with how it is resized. 
* @category rendering * @internal */ export declare class RenderGroupSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "renderGroup"; }; private readonly _renderer; constructor(renderer: Renderer); protected render({ container, transform }: { container: Container; transform: Matrix; }): void; destroy(): void; private _updateCachedRenderGroups; private _updateRenderGroups; private _updateRenderables; /** * @param renderGroup * @param renderPipes * @deprecated since 8.3.0 */ private _buildInstructions; } /** * Defines a size with a width and a height. * @category maths * @standard */ export interface Size { /** The width. */ width: number; /** The height. */ height: number; } /** * A utility type that makes all properties of T optional except for the specified keys K. * @category utils * @internal */ export type Optional = Omit & Partial>; /** @ignore */ export interface MeasureMixinConstructor { /** * The width of the display object, in pixels. * @example * ```ts * new Container({ width: 100}); * ``` * @default 0 */ width?: number; /** * The height of the display object, in pixels. * @example * ```ts * new Container({ height: 100}); * ``` * @default 0 */ height?: number; } /** * The MeasureMixin interface provides methods for measuring and manipulating the size and bounds of a display object. * It includes methods to get and set the size of the object, retrieve its local bounds, * and calculate its global bounds. * @category scene * @advanced */ export interface MeasureMixin extends Required { getSize(out?: Size): Size; setSize(width: number, height?: number): void; setSize(value: Optional): void; /** * Retrieves the local bounds of the container as a Bounds object. * Uses cached values when possible for better performance. 
* @example * ```ts * // Basic bounds check * const bounds = container.getLocalBounds(); * console.log(`Width: ${bounds.width}, Height: ${bounds.height}`); * // subsequent calls will reuse the cached bounds * const cachedBounds = container.getLocalBounds(); * console.log(bounds === cachedBounds); // true * ``` * @returns The bounding area * @see {@link Container#getBounds} For world space bounds * @see {@link Bounds} For bounds properties */ getLocalBounds(): Bounds; /** * Calculates and returns the (world) bounds of the display object as a Rectangle. * Takes into account transforms and child bounds. * @example * ```ts * // Basic bounds calculation * const bounds = sprite.getBounds(); * console.log(`World bounds: ${bounds.x}, ${bounds.y}, ${bounds.width}, ${bounds.height}`); * * // Reuse bounds object for performance * const recycleBounds = new Bounds(); * sprite.getBounds(false, recycleBounds); * * // Skip update for performance * const fastBounds = sprite.getBounds(true); * ``` * @remarks * - Includes transform calculations * - Updates scene graph by default * - Can reuse bounds objects * - Common in hit testing * @param {boolean} skipUpdate - Setting to `true` will stop the transforms of the scene graph from * being updated. This means the calculation returned MAY be out of date BUT will give you a * nice performance boost. 
* @param {Bounds} bounds - Optional bounds to store the result of the bounds calculation * @returns The minimum axis-aligned rectangle in world space that fits around this object * @see {@link Container#getLocalBounds} For untransformed bounds * @see {@link Bounds} For bounds properties */ getBounds(skipUpdate?: boolean, bounds?: Bounds): Bounds; /** @private */ _localBoundsCacheData: LocalBoundsCacheData; /** @private */ _localBoundsCacheId: number; /** @private */ _setWidth(width: number, localWidth: number): void; /** @private */ _setHeight(height: number, localHeight: number): void; } interface LocalBoundsCacheData { data: number[]; index: number; didChange: boolean; localBounds: Bounds; } /** @internal */ export declare const measureMixin: Partial; /** * Options for configuring a Sprite instance. Defines the texture, anchor point, and rendering behavior. * @example * ```ts * // Create a basic sprite with texture * const sprite = new Sprite({ * texture: Texture.from('sprite.png') * }); * * // Create a centered sprite with rounded position * const centeredSprite = new Sprite({ * texture: Texture.from('centered.png'), * anchor: 0.5, // Center point * roundPixels: true, // Crisp rendering * x: 100, // Position from ViewContainerOptions * y: 100 * }); * * // Create a sprite with specific anchor points * const anchoredSprite = new Sprite({ * texture: Texture.from('corner.png'), * anchor: { * x: 1, // Right-aligned * y: 0 // Top-aligned * } * }); * ``` * @extends ViewContainerOptions * @category scene * @standard * @noInheritDoc */ export interface SpriteOptions extends PixiMixins.SpriteOptions, ViewContainerOptions { /** * The texture to use for the sprite. 
If not provided, uses Texture.EMPTY * @default Texture.EMPTY * @example * ```ts * // Create a sprite with a texture * const sprite = new Sprite({ * texture: Texture.from('path/to/image.png') * }); * // Update the texture later * sprite.texture = Texture.from('path/to/another-image.png'); * ``` */ texture?: Texture; /** * The anchor point of the sprite (0-1 range). * Controls the origin point for rotation, scaling, and positioning. * Can be a number for uniform anchor or a PointData for separate x/y values. * @default 0 * @example * ```ts * // Centered anchor * anchor: 0.5 * // Separate x/y anchor * anchor: { x: 0.5, y: 0.5 } * // Right-aligned anchor * anchor: { x: 1, y: 0 } * ``` */ anchor?: PointData | number; /** * Whether or not to round the x/y position to whole pixels. * Useful for crisp pixel art style rendering. * @default false * @example * ```ts * const sprite = new Sprite({ * texture: Texture.from('sprite.png'), * roundPixels: true // Ensures crisp rendering * }); * ``` */ roundPixels?: boolean; } export interface Sprite extends PixiMixins.Sprite, ViewContainer { } /** * The Sprite object is one of the most important objects in PixiJS. It is a * drawing item that can be added to a scene and rendered to the screen. * Sprites can display images, handle input events, and be transformed in various ways. 
* @example * ```ts * // Create a sprite directly from an image path * const sprite = Sprite.from('assets/image.png'); * sprite.position.set(100, 100); * app.stage.addChild(sprite); * * // Create from a spritesheet (more efficient) * const sheet = await Assets.load('assets/spritesheet.json'); * const sprite = new Sprite(sheet.textures['image.png']); * * // Create with specific options * const configuredSprite = new Sprite({ * texture: Texture.from('sprite.png'), * anchor: 0.5, // Center anchor point * position: { x: 100, y: 100 }, * scale: { x: 2, y: 2 }, // Double size * rotation: Math.PI / 4 // 45 degrees * }); * * // Animate sprite properties * app.ticker.add(() => { * sprite.rotation += 0.1; // Rotate * sprite.scale.x = Math.sin(performance.now() / 1000) + 1; // Pulse scale * }); * ``` * @category scene * @standard * @see {@link SpriteOptions} For configuration options * @see {@link Texture} For texture management * @see {@link Assets} For asset loading */ export declare class Sprite extends ViewContainer { /** * Creates a new sprite based on a source texture, image, video, or canvas element. * This is a convenience method that automatically creates and manages textures. * @example * ```ts * // Create from path or URL * const sprite = Sprite.from('assets/image.png'); * * // Create from existing texture * const sprite = Sprite.from(texture); * * // Create from canvas * const canvas = document.createElement('canvas'); * const sprite = Sprite.from(canvas, true); // Skip caching new texture * ``` * @param source - The source to create the sprite from. Can be a path to an image, a texture, * or any valid texture source (canvas, video, etc.) 
* @param skipCache - Whether to skip adding to the texture cache when creating a new texture * @returns A new sprite based on the source * @see {@link Texture.from} For texture creation details * @see {@link Assets} For asset loading and management */ static from(source: Texture | TextureSourceLike, skipCache?: boolean): Sprite; /** @internal */ readonly renderPipeId: string; /** @internal */ batched: boolean; /** @internal */ readonly _anchor: ObservablePoint; /** @internal */ _texture: Texture; private readonly _visualBounds; private _width; private _height; /** * @param options - The options for creating the sprite. */ constructor(options?: SpriteOptions | Texture); set texture(value: Texture); /** * The texture that is displayed by the sprite. When changed, automatically updates * the sprite dimensions and manages texture event listeners. * @example * ```ts * // Create sprite with texture * const sprite = new Sprite({ * texture: Texture.from('sprite.png') * }); * * // Update texture * sprite.texture = Texture.from('newSprite.png'); * * // Use texture from spritesheet * const sheet = await Assets.load('spritesheet.json'); * sprite.texture = sheet.textures['frame1.png']; * * // Reset to empty texture * sprite.texture = Texture.EMPTY; * ``` * @see {@link Texture} For texture creation and management * @see {@link Assets} For asset loading */ get texture(): Texture; /** * The bounds of the sprite, taking into account the texture's trim area. 
 * @example
 * ```ts
 * const texture = new Texture({
 *     source: new TextureSource({ width: 300, height: 300 }),
 *     frame: new Rectangle(196, 66, 58, 56),
 *     trim: new Rectangle(4, 4, 58, 56),
 *     orig: new Rectangle(0, 0, 64, 64),
 *     rotate: 2,
 * });
 * const sprite = new Sprite(texture);
 * const visualBounds = sprite.visualBounds;
 * // console.log(visualBounds); // { minX: -4, maxX: 62, minY: -4, maxY: 60 }
 * ```
 */
get visualBounds(): BoundsData;
/**
 * @deprecated
 * @ignore
 */
get sourceBounds(): BoundsData;
/** @private */
protected updateBounds(): void;
/**
 * Destroys this sprite renderable and optionally its texture.
 * @param options - Options parameter. A boolean will act as if all options
 * have been set to that value
 * @example
 * sprite.destroy();
 * sprite.destroy(true);
 * sprite.destroy({ texture: true, textureSource: true });
 */
destroy(options?: DestroyOptions): void;
/**
 * The anchor sets the origin point of the sprite. The default value is taken from the {@link Texture}
 * and passed to the constructor.
 *
 * - The default is `(0,0)`, this means the sprite's origin is the top left.
 * - Setting the anchor to `(0.5,0.5)` means the sprite's origin is centered.
 * - Setting the anchor to `(1,1)` would mean the sprite's origin point will be the bottom right corner.
 *
 * If you pass only a single parameter, it will set both x and y to the same value as shown in the example below.
* @example * ```ts * // Center the anchor point * sprite.anchor = 0.5; // Sets both x and y to 0.5 * sprite.position.set(400, 300); // Sprite will be centered at this position * * // Set specific x/y anchor points * sprite.anchor = { * x: 1, // Right edge * y: 0 // Top edge * }; * * // Using individual coordinates * sprite.anchor.set(0.5, 1); // Center-bottom * * // For rotation around center * sprite.anchor.set(0.5); * sprite.rotation = Math.PI / 4; // 45 degrees around center * * // For scaling from center * sprite.anchor.set(0.5); * sprite.scale.set(2); // Scales from center point * ``` */ get anchor(): ObservablePoint; set anchor(value: PointData | number); /** * The width of the sprite, setting this will actually modify the scale to achieve the value set. * @example * ```ts * // Set width directly * sprite.width = 200; * console.log(sprite.scale.x); // Scale adjusted to match width * * // Set width while preserving aspect ratio * const ratio = sprite.height / sprite.width; * sprite.width = 300; * sprite.height = 300 * ratio; * * // For better performance when setting both width and height * sprite.setSize(300, 400); // Avoids recalculating bounds twice * * // Reset to original texture size * sprite.width = sprite.texture.orig.width; * ``` */ get width(): number; set width(value: number); /** * The height of the sprite, setting this will actually modify the scale to achieve the value set. 
* @example * ```ts * // Set height directly * sprite.height = 150; * console.log(sprite.scale.y); // Scale adjusted to match height * * // Set height while preserving aspect ratio * const ratio = sprite.width / sprite.height; * sprite.height = 200; * sprite.width = 200 * ratio; * * // For better performance when setting both width and height * sprite.setSize(300, 400); // Avoids recalculating bounds twice * * // Reset to original texture size * sprite.height = sprite.texture.orig.height; * ``` */ get height(): number; set height(value: number); /** * Retrieves the size of the Sprite as a [Size]{@link Size} object based on the texture dimensions and scale. * This is faster than getting width and height separately as it only calculates the bounds once. * @example * ```ts * // Basic size retrieval * const sprite = new Sprite(Texture.from('sprite.png')); * const size = sprite.getSize(); * console.log(`Size: ${size.width}x${size.height}`); * * // Reuse existing size object * const reuseSize = { width: 0, height: 0 }; * sprite.getSize(reuseSize); * ``` * @param out - Optional object to store the size in, to avoid allocating a new object * @returns The size of the Sprite * @see {@link Sprite#width} For getting just the width * @see {@link Sprite#height} For getting just the height * @see {@link Sprite#setSize} For setting both width and height */ getSize(out?: Size): Size; /** * Sets the size of the Sprite to the specified width and height. * This is faster than setting width and height separately as it only recalculates bounds once. 
* @example * ```ts * // Basic size setting * const sprite = new Sprite(Texture.from('sprite.png')); * sprite.setSize(100, 200); // Width: 100, Height: 200 * * // Set uniform size * sprite.setSize(100); // Sets both width and height to 100 * * // Set size with object * sprite.setSize({ * width: 200, * height: 300 * }); * * // Reset to texture size * sprite.setSize( * sprite.texture.orig.width, * sprite.texture.orig.height * ); * ``` * @param value - This can be either a number or a {@link Size} object * @param height - The height to set. Defaults to the value of `width` if not provided * @see {@link Sprite#width} For setting width only * @see {@link Sprite#height} For setting height only * @see {@link Sprite#texture} For the source dimensions */ setSize(value: number | Optional, height?: number): void; } /** @internal */ export declare class SpritePipe implements RenderPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "sprite"; }; private _renderer; constructor(renderer: Renderer); addRenderable(sprite: Sprite, instructionSet: InstructionSet): void; updateRenderable(sprite: Sprite): void; validateRenderable(sprite: Sprite): boolean; private _updateBatchableSprite; private _getGpuSprite; private _initGPUSprite; destroy(): void; } /** * The GPU object. * Contains the GPU adapter and device. * @category rendering * @advanced */ interface GPU$1 { /** The GPU adapter */ adapter: GPUAdapter; /** The GPU device */ device: GPUDevice; } /** * Options for the WebGPU context. * @property {GpuPowerPreference} [powerPreference=default] - An optional hint indicating what configuration of GPU * is suitable for the WebGPU context, can be `'high-performance'` or `'low-power'`. * Setting to `'high-performance'` will prioritize rendering performance over power consumption, * while setting to `'low-power'` will prioritize power saving over rendering performance. 
 * @property {boolean} [forceFallbackAdapter=false] - Force the use of the fallback adapter
 * @category rendering
 * @advanced
 */
export interface GpuContextOptions {
    /**
     * An optional hint indicating what configuration of GPU is suitable for the WebGPU context,
     * can be `'high-performance'` or `'low-power'`.
     * Setting to `'high-performance'` will prioritize rendering performance over power consumption,
     * while setting to `'low-power'` will prioritize power saving over rendering performance.
     * @default undefined
     */
    powerPreference?: GpuPowerPreference;
    /**
     * Force the use of the fallback adapter
     * @default false
     */
    forceFallbackAdapter: boolean;
    /** Use a shared device and adapter from another engine */
    gpu?: GPU$1;
}
/**
 * System plugin to the renderer to manage the context.
 * @class
 * @category rendering
 * @advanced
 */
export declare class GpuDeviceSystem implements System {
    /** @ignore */
    static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "device"; };
    /** The default options for the GpuDeviceSystem. */
    static defaultOptions: GpuContextOptions;
    /** The GPU device */
    gpu: GPU$1;
    private _renderer;
    private _initPromise;
    /**
     * @param {WebGPURenderer} renderer - The renderer this System works for.
     */
    constructor(renderer: WebGPURenderer);
    init(options: GpuContextOptions): Promise;
    /**
     * Handle the context change event
     * @param gpu
     */
    protected contextChange(gpu: GPU$1): void;
    /**
     * Helper to create the WebGPU adapter and device pair used by this system.
     * NOTE(review): the previous comment described a WebGL canvas context and a
     * `WebGLRenderingContext` return value — a copy-paste error; this member
     * produces the adapter/device pair (see {@link GPU$1}).
     */
    private _createDeviceAndAdaptor;
    destroy(): void;
}
/**
 * This manages the WebGPU bind groups.
this is how data is bound to a shader when rendering * @category rendering * @advanced */ export declare class BindGroupSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "bindGroup"; }; private readonly _renderer; private _hash; private _gpu; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; getBindGroup(bindGroup: BindGroup, program: GpuProgram, groupIndex: number): GPUBindGroup; private _createBindGroup; destroy(): void; } /** * System plugin to the renderer to manage buffers. * @category rendering * @advanced */ export declare class GpuBufferSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "buffer"; }; protected CONTEXT_UID: number; private _gpuBuffers; private readonly _managedBuffers; private _gpu; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; getGPUBuffer(buffer: Buffer$1): GPUBuffer; updateBuffer(buffer: Buffer$1): GPUBuffer; /** dispose all WebGL resources of all managed buffers */ destroyAll(): void; createGPUBuffer(buffer: Buffer$1): GPUBuffer; protected onBufferChange(buffer: Buffer$1): void; /** * Disposes buffer * @param buffer - buffer with data */ protected onBufferDestroy(buffer: Buffer$1): void; destroy(): void; private _destroyBuffer; } /** * The system that handles color masking for the GPU. * @category rendering * @advanced */ export declare class GpuColorMaskSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "colorMask"; }; private readonly _renderer; private _colorMaskCache; constructor(renderer: WebGPURenderer); setMask(colorMask: number): void; destroy(): void; } /** * This is a WebGL state, and is is passed to {@link GlStateSystem}. * * Each mesh rendered may require WebGL to be in a different state. 
* For example you may want different blend mode or to enable polygon offsets * @category rendering * @advanced */ export declare class State { /** * The data is a unique number based on the states settings. * This lets us quickly compare states with a single number rather than looking * at all the individual settings. */ data: number; /** @internal */ _blendModeId: number; private _blendMode; private _polygonOffset; constructor(); /** * Activates blending of the computed fragment color values. * @default true */ get blend(): boolean; set blend(value: boolean); /** * Activates adding an offset to depth values of polygon's fragments * @default false */ get offsets(): boolean; set offsets(value: boolean); /** The culling settings for this state none - No culling back - Back face culling front - Front face culling */ set cullMode(value: CULL_MODES); get cullMode(): CULL_MODES; /** * Activates culling of polygons. * @default false */ get culling(): boolean; set culling(value: boolean); /** * Activates depth comparisons and updates to the depth buffer. * @default false */ get depthTest(): boolean; set depthTest(value: boolean); /** * Enables or disables writing to the depth buffer. * @default true */ get depthMask(): boolean; set depthMask(value: boolean); /** * Specifies whether or not front or back-facing polygons can be culled. * @default false */ get clockwiseFrontFace(): boolean; set clockwiseFrontFace(value: boolean); /** * The blend mode to be applied when this state is set. Apply a value of `normal` to reset the blend mode. * Setting this mode to anything other than NO_BLEND will automatically switch blending on. * @default 'normal' */ get blendMode(): BLEND_MODES; set blendMode(value: BLEND_MODES); /** * The polygon offset. Setting this property to anything other than 0 will automatically enable polygon offset fill. 
* @default 0 */ get polygonOffset(): number; set polygonOffset(value: number); toString(): string; /** * A quickly getting an instance of a State that is configured for 2d rendering. * @returns a new State with values set for 2d rendering */ static for2d(): State; static default2d: State; } /** * The system that handles encoding commands for the GPU. * @category rendering * @advanced */ export declare class GpuEncoderSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "encoder"; readonly priority: 1; }; commandEncoder: GPUCommandEncoder; renderPassEncoder: GPURenderPassEncoder; commandFinished: Promise; private _resolveCommandFinished; private _gpu; private _boundBindGroup; private _boundVertexBuffer; private _boundIndexBuffer; private _boundPipeline; private readonly _renderer; constructor(renderer: WebGPURenderer); renderStart(): void; beginRenderPass(gpuRenderTarget: GpuRenderTarget): void; endRenderPass(): void; setViewport(viewport: Rectangle): void; setPipelineFromGeometryProgramAndState(geometry: Geometry, program: GpuProgram, state: any, topology?: Topology): void; setPipeline(pipeline: GPURenderPipeline): void; private _setVertexBuffer; private _setIndexBuffer; resetBindGroup(index: number): void; setBindGroup(index: number, bindGroup: BindGroup, program: GpuProgram): void; setGeometry(geometry: Geometry, program: GpuProgram): void; private _setShaderBindGroups; private _syncBindGroup; draw(options: { geometry: Geometry; shader: Shader; state?: State; topology?: Topology; size?: number; start?: number; instanceCount?: number; skipSync?: boolean; }): void; finishRenderPass(): void; postrender(): void; restoreRenderPass(): void; private _clearCache; destroy(): void; protected contextChange(gpu: GPU$1): void; } /** * The GpuLimitsSystem provides information about the capabilities and limitations of the underlying GPU. 
* These limits, such as the maximum number of textures that can be used in a shader * (`maxTextures`) or the maximum number of textures that can be batched together (`maxBatchableTextures`), * are determined by the specific graphics hardware and driver. * * The values for these limits are not available immediately upon instantiation of the class. * They are populated when the WebGPU Device rendering context is successfully initialized and ready, * which occurs after the `renderer.init()` method has completed. * Attempting to access these properties before the context is ready will result in undefined or default values. * * This system allows the renderer to adapt its behavior and resource allocation strategies * to stay within the supported boundaries of the GPU, ensuring optimal performance and stability. * @example * ```ts * const renderer = new WebGPURenderer(); * await renderer.init(); // GPU limits are populated after this call * * console.log(renderer.limits.maxTextures); * console.log(renderer.limits.maxBatchableTextures); * ``` * @category rendering * @advanced */ export declare class GpuLimitsSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "limits"; }; /** The maximum number of textures that can be used by a shader */ maxTextures: number; /** The maximum number of batchable textures */ maxBatchableTextures: number; private readonly _renderer; constructor(renderer: WebGPURenderer); contextChange(): void; destroy(): void; } /** * This manages the stencil buffer. 
Used primarily for masking * @category rendering * @advanced */ export declare class GpuStencilSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "stencil"; }; private readonly _renderer; private _renderTargetStencilState; private _activeRenderTarget; constructor(renderer: WebGPURenderer); protected onRenderTargetChange(renderTarget: RenderTarget): void; setStencilMode(stencilMode: STENCIL_MODES, stencilReference: number): void; destroy(): void; } /** @internal */ export declare const UNIFORM_TYPES_VALUES: readonly [ "f32", "i32", "vec2", "vec3", "vec4", "mat2x2", "mat3x3", "mat4x4", "mat3x2", "mat4x2", "mat2x3", "mat4x3", "mat2x4", "mat3x4", "vec2", "vec3", "vec4" ]; /** * useful for checking if a type is supported - a map of supported types with a true value. * @internal */ export declare const UNIFORM_TYPES_MAP: Record; /** @internal */ export type UNIFORM_TYPES_SINGLE = typeof UNIFORM_TYPES_VALUES[number]; type OPTIONAL_SPACE = " " | ""; /** @internal */ export type UNIFORM_TYPES_ARRAY = `array<${UNIFORM_TYPES_SINGLE},${OPTIONAL_SPACE}${number}>`; /** @internal */ export type UNIFORM_TYPES = UNIFORM_TYPES_SINGLE | UNIFORM_TYPES_ARRAY; /** * This is the type of the uniform structures that are used in the UniformGroup. 
* @category rendering * @advanced */ export interface UniformData { /** the value of the uniform, this could be any object - a parser will figure out how to write it to the buffer */ value: unknown; type: UNIFORM_TYPES; /** the size of the variable (eg 2 for vec2, 3 for vec3, 4 for vec4) */ size?: number; name?: string; } /** @internal */ export interface UboElement { data: UniformData; offset: number; size: number; } /** @internal */ export interface UboLayout { uboElements: UboElement[]; /** float32 size // TODO change to bytes */ size: number; } /** @internal */ export type UniformsSyncCallback = (...args: any[]) => void; type FLOPS = T extends { value: infer V; } ? V : never; /** * Extracts the value type from a uniform data object. * @internal */ export type ExtractUniformObject> = { [K in keyof T]: FLOPS; }; /** * Uniform group options * @category rendering * @advanced */ export type UniformGroupOptions = { /** * if true the UniformGroup is handled as an Uniform buffer object. * This is the only way WebGPU can work with uniforms. WebGL2 can also use this. * So don't set to true if you want to use WebGPU :D */ ubo?: boolean; /** if true, then you are responsible for when the data is uploaded to the GPU by calling `update()` */ isStatic?: boolean; }; /** * Uniform group holds uniform map and some ID's for work * * `UniformGroup` has two modes: * * 1: Normal mode * Normal mode will upload the uniforms with individual function calls as required. This is the default mode * for WebGL rendering. * * 2: Uniform buffer mode * This mode will treat the uniforms as a uniform buffer. You can pass in either a buffer that you manually handle, or * or a generic object that PixiJS will automatically map to a buffer for you. * For maximum benefits, make Ubo UniformGroups static, and only update them each frame. * This is the only way uniforms can be used with WebGPU. * * Rules of UBOs: * - UBOs only work with WebGL2, so make sure you have a fallback! 
 * - Only floats are supported (including vec[2,3,4], mat[2,3,4])
 * - Samplers cannot be used in ubo's (a GPU limitation)
 * - You must ensure that the object you pass in exactly matches the ubo structure in the shader.
 * Otherwise, weirdness will ensue!
 * - The name of the ubo object added to the group must match exactly the name of the ubo in the shader.
 *
 * When declaring your uniform options, you must pass in the value and the type of the uniform.
 * The types correspond to the WebGPU types.
 * Uniforms can be modified via the class's 'uniforms' property. It will contain all the uniforms declared in the constructor.
 *
 * ```ts
 * // UBO in shader:
 * uniform myCoolData { // Declaring a UBO...
 *     mat4 uCoolMatrix;
 *     float uFloatyMcFloatFace;
 * };
 * ```
 *
 * ```js
 * // A new Uniform Buffer Object...
 * const myCoolData = new UniformGroup({
 *     uCoolMatrix: { value: new Matrix(), type: 'mat4' },
 *     uFloatyMcFloatFace: { value: 23, type: 'f32' },
 * });
 *
 * // modify the data
 * myCoolData.uniforms.uFloatyMcFloatFace = 42;
 * // Build a shader...
 * const shader = Shader.from(srcVert, srcFrag, {
 *     myCoolData // Name matches the UBO name in the shader. Will be processed accordingly.
 * })
 * ```
 * @category rendering
 * @advanced
 */
export declare class UniformGroup implements BindResource {
    /** The default options used by the uniform group.
*/ static defaultOptions: UniformGroupOptions; /** * used internally to know if a uniform group was used in the last render pass * @internal */ _touched: number; /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** * a resource type, used to identify how to handle it when its in a bind group / shader resource * @internal */ _resourceType: string; /** * the resource id used internally by the renderer to build bind group keys * @internal */ _resourceId: number; /** the structures of the uniform group */ uniformStructures: UNIFORMS; /** the uniforms as an easily accessible map of properties */ uniforms: ExtractUniformObject; /** true if it should be used as a uniform buffer object */ ubo: boolean; /** an underlying buffer that will be uploaded to the GPU when using this UniformGroup */ buffer?: Buffer$1; /** * if true, then you are responsible for when the data is uploaded to the GPU. * otherwise, the data is reuploaded each frame. */ isStatic: boolean; /** used ito identify if this is a uniform group */ readonly isUniformGroup = true; /** * used to flag if this Uniform groups data is different from what it has stored in its buffer / on the GPU * @internal */ _dirtyId: number; /** * a signature string generated for internal use * @internal */ readonly _signature: number; readonly destroyed = false; /** * Create a new Uniform group * @param uniformStructures - The structures of the uniform group * @param options - The optional parameters of this uniform group */ constructor(uniformStructures: UNIFORMS, options?: UniformGroupOptions); /** Call this if you want the uniform groups data to be uploaded to the GPU only useful if `isStatic` is true. */ update(): void; } /** @internal */ export interface UboAdaptor { createUboElements: (uniformData: UniformData[]) => UboLayout; generateUboSync: (uboElements: UboElement[]) => UniformsSyncCallback; } /** * System plugin to the renderer to manage uniform buffers. 
 * @category rendering
 * @advanced
 */
export declare class UboSystem implements System {
    /** Cache of uniform buffer layouts and sync functions, so we don't have to re-create them */
    private _syncFunctionHash;
    private readonly _adaptor;
    constructor(adaptor: UboAdaptor);
    /**
     * Overridable by `pixi.js/unsafe-eval` to silence the error thrown
     * when the platform doesn't support unsafe eval.
     * @private
     */
    private _systemCheck;
    ensureUniformGroup(uniformGroup: UniformGroup): void;
    getUniformGroupData(uniformGroup: UniformGroup): { layout: UboLayout; syncFunction: (uniforms: Record, data: Float32Array, dataInt32: Int32Array, offset: number) => void; };
    private _initUniformGroup;
    private _generateUboSync;
    syncUniformGroup(uniformGroup: UniformGroup, data?: Float32Array, offset?: number): boolean;
    updateUniformGroup(uniformGroup: UniformGroup): boolean;
    destroy(): void;
}
/**
 * System plugin to the renderer to manage uniform buffers. With a WGSL twist!
 * @category rendering
 * @advanced
 */
export declare class GpuUboSystem extends UboSystem {
    /** @ignore */
    static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "ubo"; };
    constructor();
}
/**
 * A resource that can be bound to a bind group and used in a shader.
 * Whilst a buffer can be used as a resource, this class allows you to specify an offset and size of the buffer to use.
 * This is useful if you have a large buffer and only part of it is used in a shader.
 *
 * This resource will listen for changes on the underlying buffer and emit a `change` event itself if the buffer changes shape.
* @example * * const buffer = new Buffer({ * data: new Float32Array(1000), * usage: BufferUsage.UNIFORM, * }); * // Create a buffer resource that uses the first 100 bytes of a buffer * const bufferResource = new BufferResource({ * buffer, * offset: 0, * size: 100, * }); * @category rendering * @advanced */ export declare class BufferResource extends EventEmitter<{ change: BindResource; }> implements BindResource { /** * emits when the underlying buffer has changed shape (i.e. resized) * letting the renderer know that it needs to discard the old buffer on the GPU and create a new one * @event change */ /** a unique id for this uniform group used through the renderer */ readonly uid: number; /** * a resource type, used to identify how to handle it when its in a bind group / shader resource * @internal */ readonly _resourceType = "bufferResource"; /** * used internally to know if a uniform group was used in the last render pass * @internal */ _touched: number; /** * the resource id used internally by the renderer to build bind group keys * @internal */ _resourceId: number; /** the underlying buffer that this resource is using */ buffer: Buffer$1; /** the offset of the buffer this resource is using. If not provided, then it will use the offset of the buffer. */ readonly offset: number; /** the size of the buffer this resource is using. If not provided, then it will use the size of the buffer. */ readonly size: number; /** * A cheeky hint to the GL renderer to let it know this is a BufferResource * @internal */ readonly _bufferResource = true; /** * Has the Buffer resource been destroyed? * @readonly */ destroyed: boolean; /** * Create a new Buffer Resource. * @param options - The options for the buffer resource * @param options.buffer - The underlying buffer that this resource is using * @param options.offset - The offset of the buffer this resource is using. * If not provided, then it will use the offset of the buffer. 
* @param options.size - The size of the buffer this resource is using. * If not provided, then it will use the size of the buffer. */ constructor({ buffer, offset, size }: { buffer: Buffer$1; offset?: number; size?: number; }); protected onBufferChange(): void; /** * Destroys this resource. Make sure the underlying buffer is not used anywhere else * if you want to destroy it as well, or code will explode * @param destroyBuffer - Should the underlying buffer be destroyed as well? */ destroy(destroyBuffer?: boolean): void; } /** @internal */ export declare class GpuUniformBatchPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipes ]; readonly name: "uniformBatch"; }; private _renderer; private _bindGroupHash; private readonly _batchBuffer; private _buffers; private _bindGroups; private _bufferResources; constructor(renderer: WebGPURenderer); renderEnd(): void; private _resetBindGroups; getUniformBindGroup(group: UniformGroup, duplicate: boolean): BindGroup; getUboResource(group: UniformGroup): BufferResource; getArrayBindGroup(data: Float32Array): BindGroup; getArrayBufferResource(data: Float32Array): BufferResource; private _getBufferResource; private _getBindGroup; private _uploadBindGroups; destroy(): void; } /** * A system that creates and manages the GPU pipelines. * * Caching Mechanism: At its core, the system employs a two-tiered caching strategy to minimize * the redundant creation of GPU pipelines (or "pipes"). This strategy is based on generating unique * keys that represent the state of the graphics settings and the specific requirements of the * item being rendered. By caching these pipelines, subsequent draw calls with identical configurations * can reuse existing pipelines instead of generating new ones. 
* * State Management: The system differentiates between "global" state properties (like color masks * and stencil masks, which do not change frequently) and properties that may vary between draw calls * (such as geometry, shaders, and blend modes). Unique keys are generated for both these categories * using getStateKey for global state and getGraphicsStateKey for draw-specific settings. These keys are * then then used to caching the pipe. The next time we need a pipe we can check * the cache by first looking at the state cache and then the pipe cache. * @category rendering * @advanced */ export declare class PipelineSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "pipeline"; }; private readonly _renderer; protected CONTEXT_UID: number; private _moduleCache; private _bufferLayoutsCache; private readonly _bindingNamesCache; private _pipeCache; private readonly _pipeStateCaches; private _gpu; private _stencilState; private _stencilMode; private _colorMask; private _multisampleCount; private _depthStencilAttachment; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; setMultisampleCount(multisampleCount: number): void; setRenderTarget(renderTarget: GpuRenderTarget): void; setColorMask(colorMask: number): void; setStencilMode(stencilMode: STENCIL_MODES): void; setPipeline(geometry: Geometry, program: GpuProgram, state: State, passEncoder: GPURenderPassEncoder): void; getPipeline(geometry: Geometry, program: GpuProgram, state: State, topology?: Topology): GPURenderPipeline; private _createPipeline; private _getModule; private _createModule; private _generateBufferKey; private _generateAttributeLocationsKey; /** * Returns a hash of buffer names mapped to bind locations. * This is used to bind the correct buffer to the correct location in the shader. 
* @param geometry - The geometry where to get the buffer names * @param program - The program where to get the buffer names * @returns An object of buffer names mapped to the bind location. */ getBufferNamesToBind(geometry: Geometry, program: GpuProgram): Record; private _createVertexBufferLayouts; private _updatePipeHash; destroy(): void; } /** * The WebGPU adaptor for the render target system. Allows the Render Target System to * be used with the WebGPU renderer * @category rendering * @ignore */ export declare class GpuRenderTargetAdaptor implements RenderTargetAdaptor { private _renderTargetSystem; private _renderer; init(renderer: WebGPURenderer, renderTargetSystem: RenderTargetSystem): void; copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture>; startRenderPass(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; finishRenderPass(): void; /** * returns the gpu texture for the first color texture in the render target * mainly used by the filter manager to get copy the texture for blending * @param renderTarget * @returns a gpu texture */ private _getGpuColorTexture; getDescriptor(renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, clearValue: RgbaArray): GPURenderPassDescriptor; clear(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; initGpuRenderTarget(renderTarget: RenderTarget): GpuRenderTarget; destroyGpuRenderTarget(gpuRenderTarget: GpuRenderTarget): void; ensureDepthStencilTexture(renderTarget: RenderTarget): void; resizeGpuRenderTarget(renderTarget: RenderTarget): void; } /** * The WebGL adaptor for the render target system. 
 Allows the Render Target System to be used with the WebGPU renderer
 * @category rendering
 * @advanced
 */
export declare class GpuRenderTargetSystem extends RenderTargetSystem {
    /** @ignore */
    static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "renderTarget"; };
    adaptor: GpuRenderTargetAdaptor;
    constructor(renderer: WebGPURenderer);
}
/**
 * Data structure for GPU program layout.
 * Contains bind group layouts and pipeline layout.
 * @category rendering
 * @advanced
 */
export interface GPUProgramData {
    bindGroups: GPUBindGroupLayout[];
    pipeline: GPUPipelineLayout;
}
/**
 * A system that manages the rendering of GpuPrograms.
 * @category rendering
 * @advanced
 */
export declare class GpuShaderSystem {
    /** @ignore */
    static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "shader"; };
    private _gpu;
    private readonly _gpuProgramData;
    protected contextChange(gpu: GPU$1): void;
    getProgramData(program: GpuProgram): GPUProgramData;
    private _createGPUProgramData;
    destroy(): void;
}
/**
 * System plugin to the renderer to manage GPU state.
* @category rendering * @advanced */ export declare class GpuStateSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "state"; }; /** * State ID * @readonly */ stateId: number; /** * Polygon offset * @readonly */ polygonOffset: number; /** * Blend mode * @default 'none' * @readonly */ blendMode: BLEND_MODES; /** Whether current blend equation is different */ protected _blendEq: boolean; /** * GL context * @type {WebGLRenderingContext} * @readonly */ protected gpu: GPU$1; /** * Default WebGL State * @readonly */ protected defaultState: State; constructor(); protected contextChange(gpu: GPU$1): void; /** * Gets the blend mode data for the current state * @param state - The state to get the blend mode from */ getColorTargets(state: State): GPUColorTargetState[]; destroy(): void; } /** * Data about the pixels of a texture. * This includes the pixel data as a Uint8ClampedArray, and the width and height of the texture. * @category rendering * @advanced */ export type GetPixelsOutput = { pixels: Uint8ClampedArray; width: number; height: number; }; /** @internal */ export interface CanvasGenerator { generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; } /** * The system that handles textures for the GPU. 
* @category rendering * @advanced */ export declare class GpuTextureSystem implements System, CanvasGenerator { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUSystem ]; readonly name: "texture"; }; readonly managedTextures: TextureSource[]; protected CONTEXT_UID: number; private _gpuSources; private _gpuSamplers; private _bindGroupHash; private _textureViewHash; private readonly _uploads; private _gpu; private _mipmapGenerator?; private readonly _renderer; constructor(renderer: WebGPURenderer); protected contextChange(gpu: GPU$1): void; /** * Initializes a texture source, if it has already been initialized nothing will happen. * @param source - The texture source to initialize. * @returns The initialized texture source. */ initSource(source: TextureSource): GPUTexture; private _initSource; protected onSourceUpdate(source: TextureSource): void; protected onSourceUnload(source: TextureSource): void; protected onUpdateMipmaps(source: TextureSource): void; protected onSourceDestroy(source: TextureSource): void; protected onSourceResize(source: TextureSource): void; private _initSampler; getGpuSampler(sampler: TextureStyle): GPUSampler; getGpuSource(source: TextureSource): GPUTexture; /** * this returns s bind group for a specific texture, the bind group contains * - the texture source * - the texture style * - the texture matrix * This is cached so the bind group should only be created once per texture * @param texture - the texture you want the bindgroup for * @returns the bind group for the texture */ getTextureBindGroup(texture: Texture): BindGroup; private _createTextureBindGroup; getTextureView(texture: BindableTexture): GPUTextureView; private _createTextureView; generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; destroy(): void; } interface System$1 { extension: { name: string; }; defaultOptions?: any; new (...args: any): any; } type SystemsWithExtensionList = System$1[]; type InstanceType$1 
any> = T extends new (...args: any) => infer R ? R : any; type NameType = T[number]["extension"]["name"]; /** * Create a mapped type where each property key is a 'name' value, * and each property value is an ElementType with a matching 'name' * @internal */ export type ExtractSystemTypes = { [K in NameType]: InstanceType$1>; }; type NotUnknown = T extends unknown ? keyof T extends never ? never : T : T; type KnownProperties = { [K in keyof T as NotUnknown extends never ? never : K]: T[K]; }; type FlattenOptions = T extends { [K: string]: infer U; } ? U : never; type OptionsUnion = FlattenOptions>; type DefaultOptionsTypes = { [K in NameType]: Extract["defaultOptions"]; }; type SeparateOptions = KnownProperties>; type UnionToIntersection = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never; /** @internal */ export type ExtractRendererOptions = UnionToIntersection>; /** @internal */ export interface BatcherAdaptor { start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; init?(batchPipe: BatcherPipe): void; execute(batchPipe: BatcherPipe, batch: Batch): void; contextChange?(): void; } /** * A pipe that batches elements into batches and sends them to the renderer. * * You can install new Batchers using ExtensionType.Batcher. Each render group will * have a default batcher and any required ones will be created on demand. 
* @category rendering * @advanced */ export declare class BatcherPipe implements InstructionPipe, BatchPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "batch"; }; state: State; renderer: Renderer; private readonly _batchersByInstructionSet; private _adaptor; /** A record of all active batchers, keyed by their names */ private _activeBatches; /** The currently active batcher being used to batch elements */ private _activeBatch; static _availableBatchers: Record Batcher>; static getBatcher(name: string): Batcher; constructor(renderer: Renderer, adaptor: BatcherAdaptor); buildStart(instructionSet: InstructionSet): void; addToBatch(batchableObject: BatchableElement, instructionSet: InstructionSet): void; break(instructionSet: InstructionSet): void; buildEnd(instructionSet: InstructionSet): void; upload(instructionSet: InstructionSet): void; execute(batch: Batch): void; destroy(): void; } /** * A BatcherAdaptor that uses WebGL to render batches. * @category rendering * @ignore */ export declare class GlBatchAdaptor implements BatcherAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipesAdaptor ]; readonly name: "batch"; }; private readonly _tempState; /** * We only want to sync the a batched shaders uniforms once on first use * this is a hash of shader uids to a boolean value. When the shader is first bound * we set the value to true. When the shader is bound again we check the value and * if it is true we know that the uniforms have already been synced and we skip it. */ private _didUploadHash; init(batcherPipe: BatcherPipe): void; contextChange(): void; start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; execute(batchPipe: BatcherPipe, batch: Batch): void; } /** * The WebGL rendering context type used by the PixiJS WebGL renderer. 
* This is typically a `WebGL2RenderingContext`, which is the default for PixiJS. * It is used to ensure that the renderer operates with the correct context type. * @category rendering * @advanced */ export type GlRenderingContext = WebGL2RenderingContext; /** * @param maxIfs * @param gl * @internal */ export declare function checkMaxIfStatementsInShader(maxIfs: number, gl: GlRenderingContext): number; /** * Returns the maximum number of textures that can be batched. This uses WebGL1's `MAX_TEXTURE_IMAGE_UNITS`. * The response for this is that to get this info via WebGPU, we would need to make a context, which * would make this function async, and we want to avoid that. * @private * @deprecated Use `Renderer.limits.maxBatchableTextures` instead. * @returns {number} The maximum number of textures that can be batched */ export declare function getMaxTexturesPerBatch(): number; /** * @param maxTextures * @internal */ export declare function generateGPULayout(maxTextures: number): GPUBindGroupLayoutEntry[]; /** * @param maxTextures * @internal */ export declare function generateLayout(maxTextures: number): Record; /** * @param textures * @param size * @param maxTextures * @internal */ export declare function getTextureBatchBindGroup(textures: TextureSource[], size: number, maxTextures: number): BindGroup; /** * A BatcherAdaptor that uses the GPU to render batches. 
* @category rendering * @ignore */ export declare class GpuBatchAdaptor implements BatcherAdaptor { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGPUPipesAdaptor ]; readonly name: "batch"; }; private _shader; private _geometry; start(batchPipe: BatcherPipe, geometry: Geometry, shader: Shader): void; execute(batchPipe: BatcherPipe, batch: Batch): void; } /** * the vertex source code, an obj * @internal */ export type Vertex = { /** stick uniforms and functions in here all headers will be compiled at the top of the shader */ header?: string; /** code will be added at the start of the shader */ start?: string; /** code will be run here before lighting happens */ main?: string; /** code here will to modify anything before it is passed to the fragment shader */ end?: string; }; /** @internal */ export type Fragment = { /** stick uniforms and functions in here all headers will be compiled at the top of the shader */ header?: string; /** code will be added at the start of the shader */ start?: string; /** code will be run here before lighting happens */ main?: string; /** code here will to modify anything before it is passed to the fragment shader */ end?: string; }; /** * HighShaderBit is a part of a shader. * it is used to compile HighShaders. * * Internally shaders are made up of many of these. * You can even write your own and compile them in. * @internal */ export interface HighShaderBit { /** used to make the shader easier to understand! */ name?: string; /** the snippets of vertex code */ vertex?: Vertex; /** the snippets of fragment code */ fragment?: Fragment; } /** * source code to compile a shader. this can be directly used by pixi and should be good to go! * It contains the vertex and fragment source code. * This is the final output of the HighShader compiler. * It is used to create a shader program. 
* @internal */ export interface HighShaderSource { fragment: string; vertex: string; } /** * @param root0 * @param root0.bits * @param root0.name * @internal */ export declare function compileHighShaderGpuProgram({ bits, name }: { bits: HighShaderBit[]; name: string; }): GpuProgram; /** * @param root0 * @param root0.bits * @param root0.name * @internal */ export declare function compileHighShaderGlProgram({ bits, name }: { bits: HighShaderBit[]; name: string; }): GlProgram; /** * A high template consists of vertex and fragment source * @internal */ export interface HighShaderTemplate { name?: string; fragment: string; vertex: string; } /** @internal */ export interface CompileHighShaderOptions { template: HighShaderTemplate; bits: HighShaderBit[]; } /** * This function will take a HighShader template, some High fragments and then merge them in to a shader source. * @param options * @param options.template * @param options.bits * @internal */ export declare function compileHighShader({ template, bits }: CompileHighShaderOptions): HighShaderSource; /** * This function will take a HighShader template, some High fragments and then merge them in to a shader source. * It is specifically for WebGL and does not compile inputs and outputs. * @param options * @param options.template - The HighShader template containing vertex and fragment source. * @param options.bits - An array of HighShaderBit objects to be compiled into the shader. * @returns A HighShaderSource object containing the compiled vertex and fragment shaders. 
* @internal */ export declare function compileHighShaderGl({ template, bits }: CompileHighShaderOptions): HighShaderSource; /** * takes the HighFragment source parts and adds them to the hook hash * @param srcParts - the hash of hook arrays * @param parts - the code to inject into the hooks * @param name - optional the name of the part to add * @internal */ export declare function addBits(srcParts: Record, parts: Record, name?: string): void; /** * takes a program string and returns an hash mapping the hooks to empty arrays * @param programSrc - the program containing hooks * @internal */ export declare function compileHooks(programSrc: string): Record; /** * @param fragments * @param template * @param sort * @internal */ export declare function compileInputs(fragments: any[], template: string, sort?: boolean): string; /** * @param fragments * @param template * @internal */ export declare function compileOutputs(fragments: any[], template: string): string; /** * formats a shader so its more pleasant to read * @param shader - a glsl shader program source * @category utils * @advanced */ export declare function formatShader(shader: string): string; /** * takes a shader src and replaces any hooks with the HighFragment code. 
* @param templateSrc - the program src template * @param fragmentParts - the fragments to inject * @internal */ export declare function injectBits(templateSrc: string, fragmentParts: Record): string; /** @ignore */ export declare const vertexGPUTemplate = "\n @in aPosition: vec2;\n @in aUV: vec2;\n\n @out @builtin(position) vPosition: vec4;\n @out vUV : vec2;\n @out vColor : vec4;\n\n {{header}}\n\n struct VSOutput {\n {{struct}}\n };\n\n @vertex\n fn main( {{in}} ) -> VSOutput {\n\n var worldTransformMatrix = globalUniforms.uWorldTransformMatrix;\n var modelMatrix = mat3x3(\n 1.0, 0.0, 0.0,\n 0.0, 1.0, 0.0,\n 0.0, 0.0, 1.0\n );\n var position = aPosition;\n var uv = aUV;\n\n {{start}}\n\n vColor = vec4(1., 1., 1., 1.);\n\n {{main}}\n\n vUV = uv;\n\n var modelViewProjectionMatrix = globalUniforms.uProjectionMatrix * worldTransformMatrix * modelMatrix;\n\n vPosition = vec4((modelViewProjectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);\n\n vColor *= globalUniforms.uWorldColorAlpha;\n\n {{end}}\n\n {{return}}\n };\n"; /** @ignore */ export declare const fragmentGPUTemplate = "\n @in vUV : vec2;\n @in vColor : vec4;\n\n {{header}}\n\n @fragment\n fn main(\n {{in}}\n ) -> @location(0) vec4 {\n\n {{start}}\n\n var outColor:vec4;\n\n {{main}}\n\n var finalColor:vec4 = outColor * vColor;\n\n {{end}}\n\n return finalColor;\n };\n"; /** @ignore */ export declare const vertexGlTemplate = "\n in vec2 aPosition;\n in vec2 aUV;\n\n out vec4 vColor;\n out vec2 vUV;\n\n {{header}}\n\n void main(void){\n\n mat3 worldTransformMatrix = uWorldTransformMatrix;\n mat3 modelMatrix = mat3(\n 1.0, 0.0, 0.0,\n 0.0, 1.0, 0.0,\n 0.0, 0.0, 1.0\n );\n vec2 position = aPosition;\n vec2 uv = aUV;\n\n {{start}}\n\n vColor = vec4(1.);\n\n {{main}}\n\n vUV = uv;\n\n mat3 modelViewProjectionMatrix = uProjectionMatrix * worldTransformMatrix * modelMatrix;\n\n gl_Position = vec4((modelViewProjectionMatrix * vec3(position, 1.0)).xy, 0.0, 1.0);\n\n vColor *= uWorldColorAlpha;\n\n {{end}}\n }\n"; /** 
@ignore */ export declare const fragmentGlTemplate = "\n\n in vec4 vColor;\n in vec2 vUV;\n\n out vec4 finalColor;\n\n {{header}}\n\n void main(void) {\n\n {{start}}\n\n vec4 outColor;\n\n {{main}}\n\n finalColor = outColor * vColor;\n\n {{end}}\n }\n"; /** @internal */ export declare const colorBit: { name: string; vertex: { header: string; main: string; }; }; /** @internal */ export declare const colorBitGl: { name: string; vertex: { header: string; main: string; }; }; /** * @param maxTextures * @internal */ export declare function generateTextureBatchBit(maxTextures: number): HighShaderBit; /** * @param maxTextures * @internal */ export declare function generateTextureBatchBitGl(maxTextures: number): HighShaderBit; /** @internal */ export declare const globalUniformsBit: { name: string; vertex: { header: string; }; }; /** @internal */ export declare const globalUniformsUBOBitGl: { name: string; vertex: { header: string; }; }; /** @internal */ export declare const globalUniformsBitGl: { name: string; vertex: { header: string; }; }; /** @internal */ export declare const localUniformBit: { name: string; vertex: { header: string; main: string; end: string; }; }; /** @internal */ export declare const localUniformBitGroup2: { vertex: { header: string; main: string; end: string; }; name: string; }; /** @internal */ export declare const localUniformBitGl: { name: string; vertex: { header: string; main: string; end: string; }; }; /** @internal */ export declare const roundPixelsBit: { name: string; vertex: { header: string; }; }; /** @internal */ export declare const roundPixelsBitGl: { name: string; vertex: { header: string; }; }; /** @internal */ export declare const textureBit: { name: string; vertex: { header: string; main: string; }; fragment: { header: string; main: string; }; }; /** @internal */ export declare const textureBitGl: { name: string; vertex: { header: string; main: string; }; fragment: { header: string; main: string; }; }; /** * A generic class for 
managing a pool of items. * @template T The type of items in the pool. Must implement {@link PoolItem}. * @category utils * @advanced */ export declare class Pool { /** @internal */ readonly _classType: PoolItemConstructor; private readonly _pool; private _count; private _index; /** * Constructs a new Pool. * @param ClassType - The constructor of the items in the pool. * @param {number} [initialSize] - The initial size of the pool. */ constructor(ClassType: PoolItemConstructor, initialSize?: number); /** * Prepopulates the pool with a given number of items. * @param total - The number of items to add to the pool. */ prepopulate(total: number): void; /** * Gets an item from the pool. Calls the item's `init` method if it exists. * If there are no items left in the pool, a new one will be created. * @param {unknown} [data] - Optional data to pass to the item's constructor. * @returns {T} The item from the pool. */ get(data?: unknown): T; /** * Returns an item to the pool. Calls the item's `reset` method if it exists. * @param {T} item - The item to return to the pool. */ return(item: T): void; /** * Gets the number of items in the pool. * @readonly */ get totalSize(): number; /** * Gets the number of items in the pool that are free to use without needing to create more. * @readonly */ get totalFree(): number; /** * Gets the number of items in the pool that are currently in use. * @readonly */ get totalUsed(): number; /** clears the pool - mainly used for debugging! */ clear(): void; } /** * An object that can be stored in a {@link Pool}. * @category utils * @advanced */ export type PoolItem = { init?: (data?: any) => void; reset?: () => void; [key: string]: any; }; /** * The constructor of an object that can be stored in a {@link Pool}. * @typeParam K - The type of the object that can be stored in a {@link Pool}. 
* @category utils * @advanced */ export type PoolItemConstructor = new () => K; /** * AlphaMask is an effect that applies a mask to a container using the alpha channel of a sprite. * It can be used to create complex masking effects by using a sprite as the mask. * The mask can be inverted, and it can render the mask to a texture if the mask is not a sprite. * @category rendering * @advanced */ export declare class AlphaMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: Container; inverse: boolean; pipe: string; renderMaskToTexture: boolean; constructor(options?: { mask: Container; }); init(mask: Container): void; reset(): void; addBounds(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; static test(mask: any): boolean; } /** * The filter pipeline is responsible for applying filters scene items! * * KNOWN BUGS: * 1. Global bounds calculation is incorrect if it is used when flip flopping filters. The maths can be found below * eg: filters [noiseFilter, blurFilter] noiseFilter will calculate the global bounds incorrectly. * * 2. RenderGroups do not work with filters. This is because the renderGroup matrix is not currently taken into account. * * Implementation notes: * 1. Gotcha - nesting filters that require blending will not work correctly. This creates a chicken and egg problem * the complexity and performance required to do this is not worth it i feel.. but lets see if others agree! * * 2. Filters are designed to be changed on the fly, this is means that changing filter information each frame will * not trigger an instruction rebuild. If you are constantly turning a filter on and off.. its therefore better to set * enabled to true or false on the filter. Or setting an empty array. * * 3. 
Need to look at perhaps aliasing when flip flopping filters. Really we should only need to antialias the FIRST * Texture we render too. The rest can be non aliased. This might help performance. * Currently we flip flop with an antialiased texture if antialiasing is enabled on the filter. * @internal */ export interface FilterInstruction extends Instruction { renderPipeId: "filter"; action: "pushFilter" | "popFilter"; container?: Container; renderables?: Renderable[]; filterEffect: FilterEffect; } /** * System that manages the filter pipeline * @category rendering * @advanced */ export declare class FilterSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "filter"; }; readonly renderer: Renderer; private _filterStackIndex; private _filterStack; private readonly _filterGlobalUniforms; private readonly _globalFilterBindGroup; private _activeFilterData; constructor(renderer: Renderer); /** * The back texture of the currently active filter. Requires the filter to have `blendRequired` set to true. * @readonly */ get activeBackTexture(): Texture | undefined; /** * Pushes a filter instruction onto the filter stack. * @param instruction - The instruction containing the filter effect and container. * @internal */ push(instruction: FilterInstruction): void; /** * Applies filters to a texture. * * This method takes a texture and a list of filters, applies the filters to the texture, * and returns the resulting texture. * @param {object} params - The parameters for applying filters. * @param {Texture} params.texture - The texture to apply filters to. * @param {Filter[]} params.filters - The filters to apply. * @returns {Texture} The resulting texture after all filters have been applied. 
* @example * * ```ts * // Create a texture and a list of filters * const texture = new Texture(...); * const filters = [new BlurFilter(), new ColorMatrixFilter()]; * * // Apply the filters to the texture * const resultTexture = filterSystem.applyToTexture({ texture, filters }); * * // Use the resulting texture * sprite.texture = resultTexture; * ``` * * Key Points: * 1. padding is not currently supported here - so clipping may occur with filters that use padding. * 2. If all filters are disabled or skipped, the original texture is returned. */ generateFilteredTexture({ texture, filters }: { texture: Texture; filters: Filter[]; }): Texture; /** @internal */ pop(): void; /** * Copies the last render surface to a texture. * @param lastRenderSurface - The last render surface to copy from. * @param bounds - The bounds of the area to copy. * @param previousBounds - The previous bounds to use for offsetting the copy. */ getBackTexture(lastRenderSurface: RenderTarget, bounds: Bounds, previousBounds?: Bounds): Texture>; /** * Applies a filter to a texture. * @param filter - The filter to apply. * @param input - The input texture. * @param output - The output render surface. * @param clear - Whether to clear the output surface before applying the filter. */ applyFilter(filter: Filter, input: Texture, output: RenderSurface, clear: boolean): void; /** * Multiply _input normalized coordinates_ to this matrix to get _sprite texture normalized coordinates_. * * Use `outputMatrix * vTextureCoord` in the shader. * @param outputMatrix - The matrix to output to. * @param {Sprite} sprite - The sprite to map to. * @returns The mapped matrix. */ calculateSpriteMatrix(outputMatrix: Matrix, sprite: Sprite): Matrix; destroy(): void; /** * Sets up the bind groups and renders the filter. 
* @param filter - The filter to apply * @param input - The input texture * @param renderer - The renderer instance */ private _setupBindGroupsAndRender; /** * Sets up the filter textures including input texture and back texture if needed. * @param filterData - The filter data to update * @param bounds - The bounds for the texture * @param renderer - The renderer instance * @param previousFilterData - The previous filter data for back texture calculation */ private _setupFilterTextures; /** * Calculates and sets the global frame for the filter. * @param filterData - The filter data to update * @param offsetX - The X offset * @param offsetY - The Y offset * @param globalResolution - The global resolution * @param sourceWidth - The source texture width * @param sourceHeight - The source texture height */ private _calculateGlobalFrame; /** * Updates the filter uniforms with the current filter state. * @param input - The input texture * @param output - The output render surface * @param filterData - The current filter data * @param offsetX - The X offset for positioning * @param offsetY - The Y offset for positioning * @param resolution - The current resolution * @param isFinalTarget - Whether this is the final render target * @param clear - Whether to clear the output surface */ private _updateFilterUniforms; /** * Finds the correct resolution by looking back through the filter stack. * @param rootResolution - The fallback root resolution to use * @returns The resolution from the previous filter or root resolution */ private _findFilterResolution; /** * Finds the offset from the previous non-skipped filter in the stack. * @returns The offset coordinates from the previous filter */ private _findPreviousFilterOffset; /** * Calculates the filter area bounds based on the instruction type. 
* @param instruction - The filter instruction * @param bounds - The bounds object to populate */ private _calculateFilterArea; private _applyFiltersToTexture; private _calculateFilterBounds; private _popFilterData; private _getPreviousFilterData; private _pushFilterData; } /** * The options to use when creating a new filter. * @category filters * @advanced */ export interface FilterOptions { /** optional blend mode used by the filter when rendering (defaults to 'normal') */ blendMode?: BLEND_MODES; /** * the resolution the filter should be rendered at. The lower the resolution, the more performant * the filter will be, but the lower the quality of the output. (default 1) * If 'inherit', the resolution of the render target is used. * Consider lowering this for things like blurs filters */ resolution?: number | "inherit"; /** * the amount of pixels to pad the container with when applying the filter. For example a blur extends the * container out as it blurs, so padding is applied to ensure that extra detail is rendered as well * without clipping occurring. (default 0) */ padding?: number; /** * If true the filter will make use of antialiasing. Although it looks better this can have a performance impact. * If set to 'inherit', the filter will detect the antialiasing of the render target and change this automatically. * Definitely don't set this to true if the render target has antialiasing set to false. As it will antialias, * but you won't see the difference. (default 'off') * * This can be a boolean or [FilterAntialias]{@link FilterAntialias} string. */ antialias?: FilterAntialias | boolean; /** * If this is set to true, the filter system will grab a snap shot of the area being rendered * to and pass this into the shader. This is useful for blend modes that need to be aware of the pixels * they are rendering to. Only use if you need that data, otherwise its an extra gpu copy you don't need! 
* (default false) */ blendRequired?: boolean; /** * If this is set to true, the filter system will clip filter texture into viewport * This is useful for filters that applied to whole texture. * (default true) */ clipToViewport?: boolean; } /** * Filter options mixed with shader resources. A filter needs a shader and some resources to work. * @category filters * @advanced * @see {@link FilterOptions} */ export type FilterWithShader = FilterOptions & IShaderWithResources; /** * The antialiasing mode of the filter. This can be either: * - `on` - the filter is always antialiased regardless of the render target settings * - `off` - (default) the filter is never antialiased regardless of the render target settings * - `inherit` - the filter uses the antialias settings of the render target * @category filters * @advanced */ export type FilterAntialias = "on" | "off" | "inherit"; /** * The Filter class is the base for all filter effects used in Pixi.js * As it extends a shader, it requires that a glProgram is parsed in to work with WebGL and a gpuProgram for WebGPU. * If you don't proved one, then the filter is skipped and just rendered as if it wasn't there for that renderer. * * A filter can be applied to anything that extends Container in Pixi.js which also includes Sprites, Graphics etc. * * Its worth noting Performance-wise filters can be pretty expensive if used too much in a single scene. * The following happens under the hood when a filter is applied: * * .1. Break the current batch *
* .2. The target is measured using getGlobalBounds * (recursively go through all children and figure out how big the object is) *
* .3. Get the closest Po2 Textures from the texture pool *
* .4. Render the target to that texture *
* .5. Render that texture back to the main frame buffer as a quad using the filters program. *
*
* Some filters (such as blur) require multiple passes too which can result in an even bigger performance hit. So be careful! * Its not generally the complexity of the shader that is the bottle neck, * but all the framebuffer / shader switching that has to take place. * One filter applied to a container with many objects is MUCH faster than many filter applied to many objects. * @category filters * @advanced * @example * import { Filter } from 'pixi.js'; * * const customFilter = new Filter({ * glProgram: new GlProgram({ * fragment, * vertex, * }), * resources: { * timeUniforms: { * uTime: { value: 0.0, type: 'f32' }, * }, * }, * }); * * // Apply the filter * sprite.filters = [customFilter]; * * // Update uniform * app.ticker.add((ticker) => { * filter.resources.timeUniforms.uniforms.uTime += 0.04 * ticker.deltaTime; * }); */ export declare class Filter extends Shader { /** The default filter settings */ static defaultOptions: FilterOptions; /** * The padding of the filter. Some filters require extra space to breath such as a blur. * Increasing this will add extra width and height to the bounds of the object that the * filter is applied to. * @default 0 */ padding: number; /** * should the filter use antialiasing? * @default inherit */ antialias: FilterAntialias; /** If enabled is true the filter is applied, if false it will not. */ enabled: boolean; /** * The gpu state the filter requires to render. * @internal */ _state: State; /** * The resolution of the filter. Setting this to be lower will lower the quality but * increase the performance of the filter. * @default 1 */ resolution: number | "inherit"; /** * Whether or not this filter requires the previous render texture for blending. * @default false */ blendRequired: boolean; /** * Clip texture into viewport or not * @default true */ clipToViewport: boolean; /** * @param options - The optional parameters of this filter. 
*/ constructor(options: FilterWithShader); /** * Applies the filter * @param filterManager - The renderer to retrieve the filter from * @param input - The input render target. * @param output - The target to output to. * @param clearMode - Should the output be cleared before rendering to it */ apply(filterManager: FilterSystem, input: Texture, output: RenderSurface, clearMode: boolean): void; /** * Get the blend mode of the filter. * @default "normal" */ get blendMode(): BLEND_MODES; /** Sets the blend mode of the filter. */ set blendMode(value: BLEND_MODES); /** * A short hand function to create a filter based of a vertex and fragment shader src. * @param options * @returns A shiny new PixiJS filter! */ static from(options: FilterOptions & ShaderFromResources): Filter; } /** * A filter effect is an effect that can be applied to a container that involves applying special pixel effects * to that container as it is rendered. Used internally when the filters property is modified on a container. * @internal */ export declare class FilterEffect implements Effect { /** read only filters array - to modify, set it again! */ filters: readonly Filter[]; /** * If specified, rather than calculating the bounds of the container that the filter * will apply to, we use this rect instead. 
This is a local rect - so will have the containers transform * applied to it */ filterArea?: Rectangle; /** the pipe that knows how to handle this effect */ pipe: string; /** the priority of this effect */ priority: number; destroy(): void; } type MaskMode = "pushMaskBegin" | "pushMaskEnd" | "popMaskBegin" | "popMaskEnd"; declare class AlphaMaskEffect extends FilterEffect implements PoolItem { constructor(); get sprite(): Sprite; set sprite(value: Sprite); get inverse(): boolean; set inverse(value: boolean); init: () => void; } /** @internal */ export interface AlphaMaskInstruction extends Instruction { renderPipeId: "alphaMask"; action: MaskMode; mask: AlphaMask; inverse: boolean; maskedContainer: Container; renderMask: boolean; } /** @internal */ export interface AlphaMaskData { filterEffect: AlphaMaskEffect; maskedContainer: Container; previousRenderTarget?: RenderTarget; filterTexture?: Texture; } /** @internal */ export declare class AlphaMaskPipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "alphaMask"; }; private _renderer; private _activeMaskStage; constructor(renderer: Renderer); push(mask: Effect, maskedContainer: Container, instructionSet: InstructionSet): void; pop(mask: Effect, _maskedContainer: Container, instructionSet: InstructionSet): void; execute(instruction: AlphaMaskInstruction): void; destroy(): void; } /** * The ColorMask effect allows you to apply a color mask to the rendering process. * This can be useful for selectively rendering certain colors or for creating * effects based on color values. 
* @category rendering * @advanced */ export declare class ColorMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: number; pipe: string; constructor(options: { mask: number; }); init(mask: number): void; destroy(): void; static test(mask: any): boolean; } /** @internal */ export interface ColorMaskInstruction extends Instruction { renderPipeId: "colorMask"; colorMask: number; } /** @internal */ export declare class ColorMaskPipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "colorMask"; }; private readonly _renderer; private _colorStack; private _colorStackIndex; private _currentColor; constructor(renderer: Renderer); buildStart(): void; push(mask: Effect, _container: Container, instructionSet: InstructionSet): void; pop(_mask: Effect, _container: Container, instructionSet: InstructionSet): void; execute(instruction: ColorMaskInstruction): void; destroy(): void; } interface MaskConversionTest { test: (item: any) => boolean; maskClass: new (item: any) => Effect & PoolItem; } /** * Represents a mask effect that can be applied to a container. * @category rendering * @advanced */ export type MaskEffect = { mask: unknown; } & Effect; /** * A class that manages the conversion of masks to mask effects. * @category rendering * @ignore */ export declare class MaskEffectManagerClass { /** @private */ readonly _effectClasses: EffectConstructor[]; private readonly _tests; private _initialized; init(): void; add(test: MaskConversionTest): void; getMaskEffect(item: any): MaskEffect; returnMaskEffect(effect: Effect & PoolItem): void; } /** * A class that manages the conversion of masks to mask effects. * @class * @category rendering * @advanced */ export declare const MaskEffectManager: MaskEffectManagerClass; /** * ScissorMask is an effect that applies a scissor mask to a container. 
* It restricts rendering to the area defined by the mask. * The mask is a Container that defines the area to be rendered. * The mask must be a Container that is not renderable or measurable. * This effect is used to create clipping regions in the rendering process. * @category rendering * @advanced */ export declare class ScissorMask implements Effect { priority: number; mask: Container; pipe: string; constructor(mask: Container); addBounds(bounds: Bounds, skipUpdateTransform?: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; reset(): void; destroy(): void; } /** * A mask that uses the stencil buffer to clip the rendering of a container. * This is useful for complex masks that cannot be achieved with simple shapes. * It is more performant than using a `Graphics` mask, but requires WebGL support. * It is also useful for masking with `Container` objects that have complex shapes. 
* @category rendering * @advanced */ export declare class StencilMask implements Effect, PoolItem { static extension: ExtensionMetadata; priority: number; mask: Container; pipe: string; constructor(options: { mask: Container; }); init(mask: Container): void; reset(): void; addBounds(bounds: Bounds, skipUpdateTransform: boolean): void; addLocalBounds(bounds: Bounds, localRoot: Container): void; containsPoint(point: Point, hitTestFn: (container: Container, point: Point) => boolean): boolean; destroy(): void; static test(mask: any): boolean; } type MaskMode$1 = "pushMaskBegin" | "pushMaskEnd" | "popMaskBegin" | "popMaskEnd"; /** @internal */ export interface StencilMaskInstruction extends Instruction { renderPipeId: "stencilMask"; action: MaskMode$1; inverse: boolean; mask: StencilMask; } /** @internal */ export declare class StencilMaskPipe implements InstructionPipe { static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "stencilMask"; }; private _renderer; private _maskStackHash; private _maskHash; constructor(renderer: Renderer); push(mask: Effect, _container: Container, instructionSet: InstructionSet): void; pop(mask: Effect, _container: Container, instructionSet: InstructionSet): void; execute(instruction: StencilMaskInstruction): void; destroy(): void; } /** * @param mask * @param bounds * @param skipUpdateTransform * @internal */ export declare function addMaskBounds(mask: Container, bounds: Bounds, skipUpdateTransform: boolean): void; /** * @param mask * @param bounds * @param localRoot * @internal */ export declare function addMaskLocalBounds(mask: Container, bounds: Bounds, localRoot: Container): void; /** * Constants for various buffer types in Pixi * @category rendering * @advanced */ export declare enum BUFFER_TYPE { /** buffer type for using as an index buffer */ ELEMENT_ARRAY_BUFFER = 34963, /** buffer type for using attribute data */ ARRAY_BUFFER = 34962, /** the 
buffer type for uniform buffer objects
* @param glBuffer - the buffer to bind * @param index - the base index to bind it to. */ bindBufferBase(glBuffer: GlBuffer, index: number): void; nextBindBase(hasTransformFeedback: boolean): void; freeLocationForBufferBase(glBuffer: GlBuffer): number; getLastBindBaseLocation(glBuffer: GlBuffer): number; /** * Binds a buffer whilst also binding its range. * This will make the buffer start from the offset supplied rather than 0 when it is read. * @param glBuffer - the buffer to bind * @param index - the base index to bind at, defaults to 0 * @param offset - the offset to bind at (this is blocks of 256). 0 = 0, 1 = 256, 2 = 512 etc * @param size - the size to bind at (this is blocks of 256). */ bindBufferRange(glBuffer: GlBuffer, index?: number, offset?: number, size?: number): void; /** * Will ensure the data in the buffer is uploaded to the GPU. * @param {Buffer} buffer - the buffer to update */ updateBuffer(buffer: Buffer$1): GlBuffer; /** dispose all WebGL resources of all managed buffers */ destroyAll(): void; /** * Disposes buffer * @param {Buffer} buffer - buffer with data * @param {boolean} [contextLost=false] - If context was lost, we suppress deleteVertexArray */ protected onBufferDestroy(buffer: Buffer$1, contextLost?: boolean): void; /** * creates and attaches a GLBuffer object tied to the current context. * @param buffer * @protected */ protected createGLBuffer(buffer: Buffer$1): GlBuffer; resetState(): void; } /** * WebGL extensions for compressed textures using the PVRTC format. * @category rendering * @advanced */ interface WEBGL_compressed_texture_pvrtc$1 { COMPRESSED_RGB_PVRTC_4BPPV1_IMG: number; COMPRESSED_RGBA_PVRTC_4BPPV1_IMG: number; COMPRESSED_RGB_PVRTC_2BPPV1_IMG: number; COMPRESSED_RGBA_PVRTC_2BPPV1_IMG: number; } /** * WebGL extensions for texture compression using the ETC format. 
* @category rendering * @advanced */ interface WEBGL_compressed_texture_etc$1 { COMPRESSED_R11_EAC: number; COMPRESSED_SIGNED_R11_EAC: number; COMPRESSED_RG11_EAC: number; COMPRESSED_SIGNED_RG11_EAC: number; COMPRESSED_RGB8_ETC2: number; COMPRESSED_RGBA8_ETC2_EAC: number; COMPRESSED_SRGB8_ETC2: number; COMPRESSED_SRGB8_ALPHA8_ETC2_EAC: number; COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2: number; COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2: number; } /** * WebGL extensions for texture compression using the ETC1 format. * @category rendering * @advanced */ interface WEBGL_compressed_texture_etc1$1 { COMPRESSED_RGB_ETC1_WEBGL: number; } /** * WebGL extensions for texture compression using the ATC format. * @category rendering * @advanced */ export interface WEBGL_compressed_texture_atc { COMPRESSED_RGB_ATC_WEBGL: number; COMPRESSED_RGBA_ATC_EXPLICIT_ALPHA_WEBGL: number; COMPRESSED_RGBA_ATC_INTERPOLATED_ALPHA_WEBGL: number; } /** * WebGL extensions for texture compression using the BPTC format. * @category rendering * @advanced */ interface EXT_texture_compression_bptc$1 { COMPRESSED_RGBA_BPTC_UNORM_EXT: number; COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT: number; COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT: number; COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT: number; } /** * WebGL extensions for texture compression using the RGTC format. * @category rendering * @advanced */ interface EXT_texture_compression_rgtc$1 { COMPRESSED_RED_RGTC1_EXT: number; COMPRESSED_SIGNED_RED_RGTC1_EXT: number; COMPRESSED_RED_GREEN_RGTC2_EXT: number; COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT: number; } /** * WebGL extensions that are available in the current context. 
* @category rendering * @advanced */ export interface WebGLExtensions { drawBuffers?: WEBGL_draw_buffers; depthTexture?: OES_texture_float; loseContext?: WEBGL_lose_context; vertexArrayObject?: OES_vertex_array_object; anisotropicFiltering?: EXT_texture_filter_anisotropic; uint32ElementIndex?: OES_element_index_uint; floatTexture?: OES_texture_float; floatTextureLinear?: OES_texture_float_linear; textureHalfFloat?: OES_texture_half_float; textureHalfFloatLinear?: OES_texture_half_float_linear; colorBufferFloat?: EXT_color_buffer_float; vertexAttribDivisorANGLE?: ANGLE_instanced_arrays; s3tc?: WEBGL_compressed_texture_s3tc; s3tc_sRGB?: WEBGL_compressed_texture_s3tc_srgb; etc?: WEBGL_compressed_texture_etc$1; etc1?: WEBGL_compressed_texture_etc1$1; pvrtc?: WEBGL_compressed_texture_pvrtc$1; atc?: WEBGL_compressed_texture_atc; astc?: WEBGL_compressed_texture_astc; bptc?: EXT_texture_compression_bptc$1; rgtc?: EXT_texture_compression_rgtc$1; srgb?: EXT_sRGB; } /** * Options for the context system. * @category rendering * @advanced * @property {WebGL2RenderingContext | null} [context=null] - User-provided WebGL rendering context object. * @property {GpuPowerPreference} [powerPreference='default'] - An optional hint indicating what configuration * of GPU is suitable for the WebGL context, can be `'high-performance'` or `'low-power'`. Setting to `'high-performance'` * will prioritize rendering performance over power consumption, while setting to `'low-power'` will prioritize power saving * over rendering performance. * @property {boolean} [premultipliedAlpha=true] - Whether the compositor will assume the drawing buffer contains * colors with premultiplied alpha. * @property {boolean} [preserveDrawingBuffer=false] - Whether to enable drawing buffer preservation. * If enabled, the drawing buffer will preserve * its value until cleared or overwritten. Enable this if you need to call `toDataUrl` on the WebGL context. 
* @property {boolean} [antialias] - Whether to enable antialiasing. * @property {1 | 2} [preferWebGLVersion=2] - The preferred WebGL version to use. */ export interface ContextSystemOptions { /** * User-provided WebGL rendering context object. * @default null */ context: WebGL2RenderingContext | null; /** * An optional hint indicating what configuration of GPU is suitable for the WebGL context, * can be `'high-performance'` or `'low-power'`. * Setting to `'high-performance'` will prioritize rendering performance over power consumption, * while setting to `'low-power'` will prioritize power saving over rendering performance. * @default undefined */ powerPreference?: GpuPowerPreference; /** * Whether the compositor will assume the drawing buffer contains colors with premultiplied alpha. * @default true */ premultipliedAlpha: boolean; /** * Whether to enable drawing buffer preservation. If enabled, the drawing buffer will preserve * its value until cleared or overwritten. Enable this if you need to call `toDataUrl` on the WebGL context. * @default false */ preserveDrawingBuffer: boolean; antialias?: boolean; /** * The preferred WebGL version to use. * @default 2 */ preferWebGLVersion?: 1 | 2; /** * Whether to enable multi-view rendering. Set to true when rendering to multiple * canvases on the dom. * @default false */ multiView: boolean; } /** * System plugin to the renderer to manage the context * @category rendering * @advanced */ export declare class GlContextSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "context"; }; /** The default options for the system. */ static defaultOptions: ContextSystemOptions; protected CONTEXT_UID: number; protected gl: WebGL2RenderingContext; /** * Features supported by current renderer. * @type {object} * @readonly */ supports: { /** Support for 32-bit indices buffer. 
*/ uint32Indices: boolean; /** Support for UniformBufferObjects */ uniformBufferObject: boolean; /** Support for VertexArrayObjects */ vertexArrayObject: boolean; /** Support for SRGB texture format */ srgbTextures: boolean; /** Support for wrapping modes if a texture is non-power of two */ nonPowOf2wrapping: boolean; /** Support for MSAA (antialiasing of dynamic textures) */ msaa: boolean; /** Support for mipmaps if a texture is non-power of two */ nonPowOf2mipmaps: boolean; }; /** * Extensions available. * @type {object} * @readonly * @property {WEBGL_draw_buffers} drawBuffers - WebGL v1 extension * @property {WEBGL_depth_texture} depthTexture - WebGL v1 extension * @property {OES_texture_float} floatTexture - WebGL v1 extension * @property {WEBGL_lose_context} loseContext - WebGL v1 extension * @property {OES_vertex_array_object} vertexArrayObject - WebGL v1 extension * @property {EXT_texture_filter_anisotropic} anisotropicFiltering - WebGL v1 and v2 extension */ extensions: WebGLExtensions; webGLVersion: 1 | 2; /** * Whether to enable multi-view rendering. Set to true when rendering to multiple * canvases on the dom. * @default false */ multiView: boolean; /** * The canvas that the WebGL Context is rendering to. * This will be the view canvas. But if multiView is enabled, this canvas will not be attached to the DOM. * It will be rendered to and then copied to the target canvas. * @readonly */ canvas: ICanvas; private _renderer; private _contextLossForced; /** @param renderer - The renderer this System works for. */ constructor(renderer: WebGLRenderer); /** * `true` if the context is lost * @readonly */ get isLost(): boolean; /** * Handles the context change event. * @param {WebGLRenderingContext} gl - New WebGL context. */ protected contextChange(gl: WebGL2RenderingContext): void; init(options: ContextSystemOptions): void; ensureCanvasSize(targetCanvas: ICanvas): void; /** * Initializes the context. 
* @protected * @param {WebGLRenderingContext} gl - WebGL context */ protected initFromContext(gl: WebGL2RenderingContext): void; /** * Initialize from context options * @protected * @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/getContext * @param preferWebGLVersion * @param {object} options - context attributes */ protected createContext(preferWebGLVersion: 1 | 2, options: WebGLContextAttributes): void; /** Auto-populate the {@link GlContextSystem.extensions extensions}. */ protected getExtensions(): void; /** * Handles a lost webgl context * @param {WebGLContextEvent} event - The context lost event. */ protected handleContextLost(event: WebGLContextEvent): void; /** Handles a restored webgl context. */ protected handleContextRestored(): void; destroy(): void; /** * this function can be called to force a webGL context loss * this will release all resources on the GPU. * Useful if you need to put Pixi to sleep, and save some GPU memory * * As soon as render is called - all resources will be created again. */ forceContextLoss(): void; /** * Validate context. * @param {WebGLRenderingContext} gl - Render context. */ protected validateContext(gl: WebGL2RenderingContext): void; } /** * System plugin to the renderer to manage geometry. * @category rendering * @advanced */ export declare class GlGeometrySystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "geometry"; }; /** * `true` if we has `*_vertex_array_object` extension. * @readonly */ hasVao: boolean; /** * `true` if has `ANGLE_instanced_arrays` extension. * @readonly */ hasInstance: boolean; protected gl: GlRenderingContext; protected _activeGeometry: Geometry; protected _activeVao: WebGLVertexArrayObject; protected _geometryVaoHash: Record>; /** Renderer that owns this {@link GeometrySystem}. */ private _renderer; /** @param renderer - The renderer this System works for. 
*/ constructor(renderer: WebGLRenderer); /** Sets up the renderer context and necessary buffers. */ protected contextChange(): void; /** * Binds geometry so that is can be drawn. Creating a Vao if required * @param geometry - Instance of geometry to bind. * @param program - Instance of program to use vao for. */ bind(geometry?: Geometry, program?: GlProgram): void; /** Reset and unbind any active VAO and geometry. */ resetState(): void; /** Update buffers of the currently bound geometry. */ updateBuffers(): void; /** * Check compatibility between a geometry and a program * @param geometry - Geometry instance. * @param program - Program instance. */ protected checkCompatibility(geometry: Geometry, program: GlProgram): void; /** * Takes a geometry and program and generates a unique signature for them. * @param geometry - To get signature from. * @param program - To test geometry against. * @returns - Unique signature of the geometry and program */ protected getSignature(geometry: Geometry, program: GlProgram): string; protected getVao(geometry: Geometry, program: GlProgram): WebGLVertexArrayObject; /** * Creates or gets Vao with the same structure as the geometry and stores it on the geometry. * If vao is created, it is bound automatically. We use a shader to infer what and how to set up the * attribute locations. * @param geometry - Instance of geometry to to generate Vao for. * @param program * @param _incRefCount - Increment refCount of all geometry buffers. */ protected initGeometryVao(geometry: Geometry, program: GlProgram, _incRefCount?: boolean): WebGLVertexArrayObject; /** * Disposes geometry. * @param geometry - Geometry with buffers. Only VAO will be disposed * @param [contextLost=false] - If context was lost, we suppress deleteVertexArray */ protected onGeometryDestroy(geometry: Geometry, contextLost?: boolean): void; /** * Dispose all WebGL resources of all managed geometries. 
* @param [contextLost=false] - If context was lost, we suppress `gl.delete` calls */ destroyAll(contextLost?: boolean): void; /** * Activate vertex array object. * @param geometry - Geometry instance. * @param program - Shader program instance. */ protected activateVao(geometry: Geometry, program: GlProgram): void; /** * Draws the currently bound geometry. * @param topology - The type primitive to render. * @param size - The number of elements to be rendered. If not specified, all vertices after the * starting vertex will be drawn. * @param start - The starting vertex in the geometry to start drawing from. If not specified, * drawing will start from the first vertex. * @param instanceCount - The number of instances of the set of elements to execute. If not specified, * all instances will be drawn. * @returns This instance of the geometry system. */ draw(topology?: Topology, size?: number, start?: number, instanceCount?: number): this; /** Unbind/reset everything. */ protected unbind(): void; destroy(): void; } /** * @param format * @internal */ export declare function getGlTypeFromFormat(format: VertexFormat): number; /** * The options for the back buffer system. * @category rendering * @property {boolean} [useBackBuffer=false] - if true will use the back buffer where required * @property {boolean} [antialias=false] - if true will ensure the texture is antialiased * @advanced */ export interface GlBackBufferOptions { /** * if true will use the back buffer where required * @default false */ useBackBuffer?: boolean; /** if true will ensure the texture is antialiased */ antialias?: boolean; } /** * For blend modes you need to know what pixels you are actually drawing to. For this to be possible in WebGL * we need to render to a texture and then present that texture to the screen. This system manages that process. * * As the main scene is rendered to a texture, it means we can sample it and copy its pixels, * something not possible on the main canvas. 
If antialiasing is set to true
* @category rendering * @advanced */ export declare class GlEncoderSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "encoder"; }; readonly commandFinished: Promise; private readonly _renderer; constructor(renderer: WebGLRenderer); setGeometry(geometry: Geometry, shader?: Shader): void; finishRenderPass(): void; draw(options: { geometry: Geometry; shader: Shader; state?: State; topology?: Topology; size?: number; start?: number; instanceCount?: number; skipSync?: boolean; }): void; destroy(): void; } /** * The GpuLimitsSystem provides information about the capabilities and limitations of the underlying GPU. * These limits, such as the maximum number of textures that can be used in a shader * (`maxTextures`) or the maximum number of textures that can be batched together (`maxBatchableTextures`), * are determined by the specific graphics hardware and driver. * * The values for these limits are not available immediately upon instantiation of the class. * They are populated when the GL rendering context is successfully initialized and ready, * which occurs after the `renderer.init()` method has completed. * Attempting to access these properties before the context is ready will result in undefined or default values. * * This system allows the renderer to adapt its behavior and resource allocation strategies * to stay within the supported boundaries of the GPU, ensuring optimal performance and stability. 
* @example * ```ts * const renderer = new WebGlRenderer(); * await renderer.init(); * * console.log(renderer.limits.maxTextures); * ``` * @category rendering * @advanced */ export declare class GlLimitsSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "limits"; }; /** The maximum number of textures that can be used by a shader */ maxTextures: number; /** The maximum number of batchable textures */ maxBatchableTextures: number; /** The maximum number of uniform bindings */ maxUniformBindings: number; private readonly _renderer; constructor(renderer: WebGLRenderer); contextChange(): void; destroy(): void; } /** * This manages the stencil buffer. Used primarily for masking * @category rendering * @advanced */ export declare class GlStencilSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "stencil"; }; private _gl; private readonly _stencilCache; private _renderTargetStencilState; private _stencilOpsMapping; private _comparisonFuncMapping; private _activeRenderTarget; constructor(renderer: WebGLRenderer); protected contextChange(gl: WebGLRenderingContext): void; protected onRenderTargetChange(renderTarget: RenderTarget): void; resetState(): void; setStencilMode(stencilMode: STENCIL_MODES, stencilReference: number): void; destroy?: () => void; } /** * System plugin to the renderer to manage uniform buffers. But with an WGSL adaptor. * @category rendering * @advanced */ export declare class GlUboSystem extends UboSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "ubo"; }; constructor(); } /** * The WebGL adaptor for the render target system. 
Allows the Render Target System to be used with the WebGL renderer * @category rendering * @ignore */ export declare class GlRenderTargetAdaptor implements RenderTargetAdaptor { private _renderTargetSystem; private _renderer; private _clearColorCache; private _viewPortCache; init(renderer: WebGLRenderer, renderTargetSystem: RenderTargetSystem): void; contextChange(): void; copyToTexture(sourceRenderSurfaceTexture: RenderTarget, destinationTexture: Texture, originSrc: { x: number; y: number; }, size: { width: number; height: number; }, originDest: { x: number; y: number; }): Texture>; startRenderPass(renderTarget: RenderTarget, clear?: CLEAR_OR_BOOL, clearColor?: RgbaArray, viewport?: Rectangle): void; finishRenderPass(renderTarget?: RenderTarget): void; initGpuRenderTarget(renderTarget: RenderTarget): GlRenderTarget; destroyGpuRenderTarget(gpuRenderTarget: GlRenderTarget): void; clear(_renderTarget: RenderTarget, clear: CLEAR_OR_BOOL, clearColor?: RgbaArray): void; resizeGpuRenderTarget(renderTarget: RenderTarget): void; private _initColor; private _resizeColor; private _initStencil; private _resizeStencil; prerender(renderTarget: RenderTarget): void; postrender(renderTarget: RenderTarget): void; } /** * The WebGL adaptor for the render target system. Allows the Render Target System to be used with the WebGl renderer * @category rendering * @advanced */ export declare class GlRenderTargetSystem extends RenderTargetSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "renderTarget"; }; adaptor: GlRenderTargetAdaptor; constructor(renderer: WebGLRenderer); } /** * The precision types available in WebGL shaders. * * These types define the precision of floating-point calculations in shaders. * - `highp`: High precision, typically 32-bit floating-point. * - `mediump`: Medium precision, typically 16-bit floating-point. * - `lowp`: Low precision, typically 8-bit floating-point. 
* @category rendering * @advanced */ export type PRECISION = `highp` | `mediump` | `lowp`; /** @private */ export declare class IGLUniformData { location: WebGLUniformLocation; value: number | boolean | Float32Array | Int32Array | Uint32Array | boolean[]; } /** * Helper class to create a WebGL Program * @private */ export declare class GlProgramData { /** The shader program. */ program: WebGLProgram; /** * Holds the uniform data which contains uniform locations * and current uniform values used for caching and preventing unneeded GPU commands. */ uniformData: Record; /** * UniformGroups holds the various upload functions for the shader. Each uniform group * and program have a unique upload function generated. */ uniformGroups: Record; /** A hash that stores where UBOs are bound to on the program. */ uniformBlockBindings: Record; /** A hash for lazily-generated uniform uploading functions. */ uniformSync: Record; /** * A place where dirty ticks are stored for groups * If a tick here does not match with the Higher level Programs tick, it means * we should re upload the data. */ uniformDirtyGroups: Record; /** * Makes a new Pixi program. * @param program - webgl program * @param uniformData - uniforms */ constructor(program: WebGLProgram, uniformData: { [key: string]: IGLUniformData; }); /** Destroys this program. */ destroy(): void; } /** @internal */ export interface ShaderSyncData { textureCount: number; blockIndex: number; } /** @internal */ export type ShaderSyncFunction = (renderer: WebGLRenderer, shader: Shader, syncData: ShaderSyncData) => void; /** * System plugin to the renderer to manage the shaders for WebGL. 
* @category rendering * @advanced */ export declare class GlShaderSystem { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "shader"; }; /** @internal */ _activeProgram: GlProgram; private _programDataHash; private readonly _renderer; /** @internal */ _gl: WebGL2RenderingContext; private _shaderSyncFunctions; constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; /** * Changes the current shader to the one given in parameter. * @param shader - the new shader * @param skipSync - false if the shader should automatically sync its uniforms. * @returns the glProgram that belongs to the shader. */ bind(shader: Shader, skipSync?: boolean): void; /** * Updates the uniform group. * @param uniformGroup - the uniform group to update */ updateUniformGroup(uniformGroup: UniformGroup): void; /** * Binds a uniform block to the shader. * @param uniformGroup - the uniform group to bind * @param name - the name of the uniform block * @param index - the index of the uniform block */ bindUniformBlock(uniformGroup: UniformGroup | BufferResource, name: string, index?: number): void; private _setProgram; /** * @param program - the program to get the data for * @internal */ _getProgramData(program: GlProgram): GlProgramData; private _createProgramData; destroy(): void; /** * Creates a function that can be executed that will sync the shader as efficiently as possible. * Overridden by the unsafe eval package if you don't want eval used in your project. * @param shader - the shader to generate the sync function for * @param shaderSystem - the shader system to use * @returns - the generated sync function * @ignore */ _generateShaderSync(shader: Shader, shaderSystem: GlShaderSystem): ShaderSyncFunction; resetState(): void; } /** * Generates the a function that will efficiently sync shader resources with the GPU. 
* @param shader - The shader to generate the code for * @param shaderSystem - An instance of the shader system * @internal */ export declare function generateShaderSyncCode(shader: Shader, shaderSystem: GlShaderSystem): ShaderSyncFunction; /** * Automatically generates a uniform group that holds the texture samplers for a shader. * This is used mainly by the shaders that batch textures! * @param maxTextures - the number of textures that this uniform group will contain. * @returns a uniform group that holds the texture samplers. * @internal */ export declare function getBatchSamplersUniformGroup(maxTextures: number): UniformGroup; /** * System plugin to the renderer to manage shaders. * @category rendering * @advanced */ export declare class GlUniformGroupSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "uniformGroup"; }; /** * The current WebGL rendering context. * @type {WebGLRenderingContext} */ protected gl: GlRenderingContext; /** Cache to holds the generated functions. Stored against UniformObjects unique signature. */ private _cache; private _renderer; private _uniformGroupSyncHash; /** @param renderer - The renderer this System works for. */ constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; /** * Uploads the uniforms values to the currently bound shader. * @param group - the uniforms values that be applied to the current shader * @param program * @param syncData * @param syncData.textureCount */ updateUniformGroup(group: UniformGroup, program: GlProgram, syncData: { textureCount: number; }): void; /** * Overridable by the pixi.js/unsafe-eval package to use static syncUniforms instead. * @param group * @param program */ private _getUniformSyncFunction; private _createUniformSyncFunction; private _generateUniformsSync; /** * Takes a uniform group and data and generates a unique signature for them. 
* @param group - The uniform group to get signature of * @param group.uniforms * @param uniformData - Uniform information generated by the shader * @param preFix * @returns Unique signature of the uniform group */ private _getSignature; /** Destroys this System and removes all its textures. */ destroy(): void; } /** * @param fragmentShader * @internal */ export declare function migrateFragmentFromV7toV8(fragmentShader: string): string; /** * @private * @param {WebGLRenderingContext} gl - The current WebGL context {WebGLProgram} * @param {number} type - the type, can be either VERTEX_SHADER or FRAGMENT_SHADER * @param {string} src - The vertex shader source as an array of strings. * @returns {WebGLShader} the shader */ export declare function compileShader(gl: WebGLRenderingContextBase, type: number, src: string): WebGLShader; /** * @param {string} type - Type of value * @param {number} size * @private */ export declare function defaultValue(type: string, size: number): number | Float32Array | Int32Array | Uint32Array | boolean | boolean[]; /** * This function looks at the attribute information provided to the geometry and attempts * to fill in any gaps. We do this by looking at the extracted data from the shader and * making best guesses. * * Most of the time users don't need to provide all the attribute info beyond the data itself, so we * can fill in the gaps for them. If you are using attributes in a more advanced way, * don't forget to add all the info at creation! * @param geometry - the geometry to ensure attributes for * @param extractedData - the extracted data from the shader * @internal */ export declare function ensureAttributes(geometry: Geometry, extractedData: Record): void; /** * generates a WebGL Program object from a high level Pixi Program. * @param gl - a rendering context on which to generate the program * @param program - the high level Pixi Program. 
* @private */ export declare function generateProgram(gl: GlRenderingContext, program: GlProgram): GlProgramData; /** @internal */ export declare function getMaxFragmentPrecision(): PRECISION; /** * returns a little WebGL context to use for program inspection. * @private * @returns {WebGLRenderingContext} a gl context to test with */ export declare function getTestContext(): GlRenderingContext; /** * returns the uniform block data from the program * @private * @param program - the webgl program * @param gl - the WebGL context * @returns {object} the uniform data for this program */ export declare function getUboData(program: WebGLProgram, gl: WebGL2RenderingContext): Record; /** * returns the uniform data from the program * @private * @param program - the webgl program * @param gl - the WebGL context * @returns {object} the uniform data for this program */ export declare function getUniformData(program: WebGLProgram, gl: WebGLRenderingContextBase): { [key: string]: GlUniformData; }; /** * * logs out any program errors * @param gl - The current WebGL context * @param program - the WebGL program to display errors for * @param vertexShader - the fragment WebGL shader program * @param fragmentShader - the vertex WebGL shader program * @private */ export declare function logProgramError(gl: WebGLRenderingContext, program: WebGLProgram, vertexShader: WebGLShader, fragmentShader: WebGLShader): void; /** * @private * @param {string} type */ export declare function mapSize(type: string): number; /** * @param gl * @param type * @internal */ export declare function mapType(gl: any, type: number): string; /** * @param gl * @param type * @internal */ export declare function mapGlToVertexFormat(gl: any, type: number): VertexFormat; /** * @param src * @param isES300 * @param isFragment * @internal */ export declare function addProgramDefines(src: string, isES300: boolean, isFragment?: boolean): string; interface EnsurePrecisionOptions { requestedVertexPrecision: PRECISION; 
requestedFragmentPrecision: PRECISION; maxSupportedVertexPrecision: PRECISION; maxSupportedFragmentPrecision: PRECISION; } /** * Sets the float precision on the shader, ensuring the device supports the request precision. * If the precision is already present, it just ensures that the device is able to handle it. * @param src * @param options * @param options.requestedVertexPrecision * @param options.requestedFragmentPrecision * @param options.maxSupportedVertexPrecision * @param options.maxSupportedFragmentPrecision * @param isFragment * @private */ export declare function ensurePrecision(src: string, options: EnsurePrecisionOptions, isFragment: boolean): string; /** * @param src * @param isES300 * @internal */ export declare function insertVersion(src: string, isES300: boolean): string; /** * @param src * @param root0 * @param root0.name * @param isFragment * @internal */ export declare function setProgramName(src: string, { name }: { name: string; }, isFragment?: boolean): string; /** * @param src * @param isES300 * @internal */ export declare function stripVersion(src: string, isES300: boolean): string; /** @internal */ export declare const WGSL_TO_STD40_SIZE: Record; /** * @param uniformData * @internal */ export declare function createUboElementsSTD40(uniformData: UniformData[]): UboLayout; /** * @param uboElements * @internal */ export declare function createUboSyncFunctionSTD40(uboElements: UboElement[]): UniformsSyncCallback; /** * This generates a function that will sync an array to the uniform buffer * following the std140 layout * @param uboElement - the element to generate the array sync for * @param offsetToAdd - the offset to append at the start of the code * @returns - the generated code * @internal */ export declare function generateArraySyncSTD40(uboElement: UboElement, offsetToAdd: number): string; /** * @param group * @param uniformData * @internal */ export declare function generateUniformsSync(group: UniformGroup, uniformData: Record): 
UniformsSyncCallback; /** @internal */ export declare const UNIFORM_TO_SINGLE_SETTERS: Record; /** @internal */ export declare const UNIFORM_TO_ARRAY_SETTERS: Record; /** * System plugin to the renderer to manage WebGL state machines * @category rendering * @advanced */ export declare class GlStateSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "state"; }; /** * State ID * @readonly */ stateId: number; /** * Polygon offset * @readonly */ polygonOffset: number; /** * Blend mode * @default 'none' * @readonly */ blendMode: BLEND_MODES; /** Whether current blend equation is different */ protected _blendEq: boolean; /** * GL context * @type {WebGLRenderingContext} * @readonly */ protected gl: GlRenderingContext; protected blendModesMap: Record; /** * Collection of calls * @type {Function[]} */ protected readonly map: ((value: boolean) => void)[]; /** * Collection of check calls * @type {Function[]} */ protected readonly checks: ((system: this, state: State) => void)[]; /** * Default WebGL State * @readonly */ protected defaultState: State; /** * Whether to invert the front face when rendering * This is used for render textures where the Y-coordinate is flipped * @default false */ private _invertFrontFace; private _glFrontFace; private _cullFace; private _frontFaceDirty; private _frontFace; constructor(renderer: WebGLRenderer); protected onRenderTargetChange(renderTarget: RenderTarget): void; protected contextChange(gl: GlRenderingContext): void; /** * Sets the current state * @param {*} state - The state to set. */ set(state: State): void; /** * Sets the state, when previous state is unknown. * @param {*} state - The state to set */ forceState(state: State): void; /** * Sets whether to enable or disable blending. * @param value - Turn on or off WebGl blending. */ setBlend(value: boolean): void; /** * Sets whether to enable or disable polygon offset fill. 
* @param value - Turn on or off webgl polygon offset testing. */ setOffset(value: boolean): void; /** * Sets whether to enable or disable depth test. * @param value - Turn on or off webgl depth testing. */ setDepthTest(value: boolean): void; /** * Sets whether to enable or disable depth mask. * @param value - Turn on or off webgl depth mask. */ setDepthMask(value: boolean): void; /** * Sets whether to enable or disable cull face. * @param {boolean} value - Turn on or off webgl cull face. */ setCullFace(value: boolean): void; /** * Sets the gl front face. * @param {boolean} value - true is clockwise and false is counter-clockwise */ setFrontFace(value: boolean): void; /** * Sets the blend mode. * @param {number} value - The blend mode to set to. */ setBlendMode(value: BLEND_MODES): void; /** * Sets the polygon offset. * @param {number} value - the polygon offset * @param {number} scale - the polygon offset scale */ setPolygonOffset(value: number, scale: number): void; /** Resets all the logic and disables the VAOs. */ resetState(): void; /** * Checks to see which updates should be checked based on which settings have been activated. * * For example, if blend is enabled then we should check the blend modes each time the state is changed * or if polygon fill is activated then we need to check if the polygon offset changes. * The idea is that we only check what we have too. * @param func - the checking function to add or remove * @param value - should the check function be added or removed. */ private _updateCheck; /** * A private little wrapper function that we call to check the blend mode. * @param system - the System to perform the state check on * @param state - the state that the blendMode will pulled from */ private static _checkBlendMode; /** * A private little wrapper function that we call to check the polygon offset. 
* @param system - the System to perform the state check on * @param state - the state that the blendMode will pulled from */ private static _checkPolygonOffset; /** @ignore */ destroy(): void; } /** * Maps gl blend combinations to WebGL. * @param gl * @returns {object} Map of gl blend combinations to WebGL. * @internal */ export declare function mapWebGLBlendModesToPixi(gl: GlRenderingContext): Record; /** * Various GL texture/resources formats. * @category rendering * @advanced */ export declare enum GL_FORMATS { RGBA = 6408, RGB = 6407, RG = 33319, RED = 6403, RGBA_INTEGER = 36249, RGB_INTEGER = 36248, RG_INTEGER = 33320, RED_INTEGER = 36244, ALPHA = 6406, LUMINANCE = 6409, LUMINANCE_ALPHA = 6410, DEPTH_COMPONENT = 6402, DEPTH_STENCIL = 34041 } /** * Various GL target types. * @category rendering * @advanced */ export declare enum GL_TARGETS { TEXTURE_2D = 3553, TEXTURE_CUBE_MAP = 34067, TEXTURE_2D_ARRAY = 35866, TEXTURE_CUBE_MAP_POSITIVE_X = 34069, TEXTURE_CUBE_MAP_NEGATIVE_X = 34070, TEXTURE_CUBE_MAP_POSITIVE_Y = 34071, TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072, TEXTURE_CUBE_MAP_POSITIVE_Z = 34073, TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074 } /** * The wrap modes that are supported by pixi. * * The {@link WRAP_MODE} wrap mode affects the default wrapping mode of future operations. * It can be re-assigned to either CLAMP or REPEAT, depending upon suitability. * If the texture is non power of two then clamp will be used regardless as WebGL can * only use REPEAT if the texture is po2. * * This property only affects WebGL. 
* @category rendering * @advanced */ export declare enum GL_WRAP_MODES { /** * The textures uvs are clamped * @default 33071 */ CLAMP = 33071, /** * The texture uvs tile and repeat * @default 10497 */ REPEAT = 10497, /** * The texture uvs tile and repeat with mirroring * @default 33648 */ MIRRORED_REPEAT = 33648 } /** @internal */ export declare enum GL_TYPES { /** * 8 bits per channel for gl.RGBA * @default 5121 */ UNSIGNED_BYTE = 5121, /** @default 5123 */ UNSIGNED_SHORT = 5123, /** * 5 red bits, 6 green bits, 5 blue bits. * @default 33635 */ UNSIGNED_SHORT_5_6_5 = 33635, /** * 4 red bits, 4 green bits, 4 blue bits, 4 alpha bits. * @default 32819 */ UNSIGNED_SHORT_4_4_4_4 = 32819, /** * 5 red bits, 5 green bits, 5 blue bits, 1 alpha bit. * @default 32820 */ UNSIGNED_SHORT_5_5_5_1 = 32820, /** @default 5125 */ UNSIGNED_INT = 5125, /** @default 35899 */ UNSIGNED_INT_10F_11F_11F_REV = 35899, /** @default 33640 */ UNSIGNED_INT_2_10_10_10_REV = 33640, /** @default 34042 */ UNSIGNED_INT_24_8 = 34042, /** @default 35902 */ UNSIGNED_INT_5_9_9_9_REV = 35902, /** @default 5120 */ BYTE = 5120, /** @default 5122 */ SHORT = 5122, /** @default 5124 */ INT = 5124, /** @default 5126 */ FLOAT = 5126, /** @default 36269 */ FLOAT_32_UNSIGNED_INT_24_8_REV = 36269, /** @default 36193 */ HALF_FLOAT = 36193 } /** * Internal texture for WebGL context * @category rendering * @ignore */ export declare class GlTexture { target: GL_TARGETS; /** The WebGL texture. */ texture: WebGLTexture; /** Width of texture that was used in texImage2D. */ width: number; /** Height of texture that was used in texImage2D. */ height: number; /** Whether mip levels has to be generated. */ mipmap: boolean; /** Type copied from texture source. */ type: number; /** Type copied from texture source. */ internalFormat: number; /** Type of sampler corresponding to this texture. 
See {@link SAMPLER_TYPES} */ samplerType: number; format: GL_FORMATS; constructor(texture: WebGLTexture); } /** * The system for managing textures in WebGL. * @category rendering * @advanced */ export declare class GlTextureSystem implements System, CanvasGenerator { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem ]; readonly name: "texture"; }; readonly managedTextures: TextureSource[]; private readonly _renderer; private _glTextures; private _glSamplers; private _boundTextures; private _activeTextureLocation; private _boundSamplers; private readonly _uploads; private _gl; private _mapFormatToInternalFormat; private _mapFormatToType; private _mapFormatToFormat; private _premultiplyAlpha; private readonly _useSeparateSamplers; constructor(renderer: WebGLRenderer); protected contextChange(gl: GlRenderingContext): void; /** * Initializes a texture source, if it has already been initialized nothing will happen. * @param source - The texture source to initialize. * @returns The initialized texture source. 
*/ initSource(source: TextureSource): void; bind(texture: BindableTexture, location?: number): void; bindSource(source: TextureSource, location?: number): void; private _bindSampler; unbind(texture: BindableTexture): void; private _activateLocation; private _initSource; protected onStyleChange(source: TextureSource): void; protected updateStyle(source: TextureSource, firstCreation: boolean): void; protected onSourceUnload(source: TextureSource): void; protected onSourceUpdate(source: TextureSource): void; protected onUpdateMipmaps(source: TextureSource, bind?: boolean): void; protected onSourceDestroy(source: TextureSource): void; private _initSampler; private _getGlSampler; getGlSource(source: TextureSource): GlTexture; generateCanvas(texture: Texture): ICanvas; getPixels(texture: Texture): GetPixelsOutput; destroy(): void; resetState(): void; } /** @internal */ export interface GLTextureUploader { id: string; upload(source: TextureSource, glTexture: GlTexture, gl: GlRenderingContext, webGLVersion: number): void; } /** @internal */ export declare const glUploadBufferImageResource: GLTextureUploader; /** @internal */ export declare const glUploadCompressedTextureResource: GLTextureUploader; /** @internal */ export declare const glUploadImageResource: GLTextureUploader; /** @internal */ export declare const glUploadVideoResource: GLTextureUploader; /** * @param style * @param gl * @param mipmaps * @param anisotropicExt * @param glFunctionName * @param firstParam * @param forceClamp * @param firstCreation * @internal */ export declare function applyStyleParams(style: TextureStyle, gl: WebGL2RenderingContext, mipmaps: boolean, anisotropicExt: EXT_texture_filter_anisotropic, glFunctionName: "samplerParameteri" | "texParameteri", firstParam: 3553 | WebGLSampler, forceClamp: boolean, /** if true we can skip setting certain values if the values is the same as the default gl values */ firstCreation: boolean): void; /** @internal */ export declare function 
getSupportedGlCompressedTextureFormats(): TEXTURE_FORMATS[]; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param {WebGLRenderingContext} gl - The rendering context. * @returns Lookup table. */ export declare function mapFormatToGlFormat(gl: GlRenderingContext): Record; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param gl - The rendering context. * @param extensions - The WebGL extensions. * @returns Lookup table. */ export declare function mapFormatToGlInternalFormat(gl: GlRenderingContext, extensions: WebGLExtensions): Record; /** * Returns a lookup table that maps each type-format pair to a compatible internal format. * @function mapTypeAndFormatToInternalFormat * @private * @param {WebGLRenderingContext} gl - The rendering context. * @returns Lookup table. */ export declare function mapFormatToGlType(gl: GlRenderingContext): Record; /** @internal */ export declare const scaleModeToGlFilter: { linear: number; nearest: number; }; /** @internal */ export declare const mipmapScaleModeToGlFilter: { linear: { linear: number; nearest: number; }; nearest: { linear: number; nearest: number; }; }; /** @internal */ export declare const wrapModeToGlAddress: { "clamp-to-edge": number; repeat: number; "mirror-repeat": number; }; /** @internal */ export declare const compareModeToGlCompare: { never: number; less: number; equal: number; "less-equal": number; greater: number; "not-equal": number; "greater-equal": number; always: number; }; /** * @param pixels * @internal */ export declare function unpremultiplyAlpha(pixels: Uint8Array | Uint8ClampedArray): void; /** @internal */ export declare class UboBatch { data: Float32Array; private readonly _minUniformOffsetAlignment; byteIndex: number; constructor({ minUniformOffsetAlignment }: { minUniformOffsetAlignment: 
number; }); clear(): void; addEmptyGroup(size: number): number; addGroup(array: Float32Array): number; destroy(): void; } /** * @param pm * @param x * @param y * @param width * @param height * @param flipY * @internal */ export declare function calculateProjection(pm: Matrix, x: number, y: number, width: number, height: number, flipY: boolean): Matrix; /** @internal */ export declare const WGSL_ALIGN_SIZE_DATA: Record; /** * @param uniformData * @internal */ export declare function createUboElementsWGSL(uniformData: UniformData[]): UboLayout; /** * @param uboElements * @internal */ export declare function createUboSyncFunctionWGSL(uboElements: UboElement[]): UniformsSyncCallback; /** * @param root0 * @param root0.source * @param root0.entryPoint * @internal */ export declare function extractAttributesFromGpuProgram({ source, entryPoint }: ProgramSource): Record; /** * This generates a function that will sync an array to the uniform buffer * following the wgsl layout * @param uboElement - the element to generate the array sync for * @param offsetToAdd - the offset to append at the start of the code * @returns - the generated code * @internal */ export declare function generateArraySyncWGSL(uboElement: UboElement, offsetToAdd: number): string; /** * @param root0 * @param root0.groups * @internal */ export declare function generateGpuLayoutGroups({ groups }: StructsAndGroups): ProgramPipelineLayoutDescription; /** * @param root0 * @param root0.groups * @internal */ export declare function generateLayoutHash({ groups }: StructsAndGroups): ProgramLayout; /** * @param vertexStructsAndGroups * @param fragmentStructsAndGroups * @internal */ export declare function removeStructAndGroupDuplicates(vertexStructsAndGroups: StructsAndGroups, fragmentStructsAndGroups: StructsAndGroups): { structs: { name: string; members: Record; }[]; groups: { group: number; binding: number; name: string; isUniform: boolean; type: string; }[]; }; /** @internal */ export declare const 
GpuBlendModesToPixi: Partial>; /** * The stencil state for the GPU renderer. * This is used to define how the stencil buffer should be configured. * @category rendering * @advanced */ export interface StencilState { stencilWriteMask?: number; stencilReadMask?: number; stencilFront?: { compare: "always" | "equal" | "not-equal"; passOp: "increment-clamp" | "decrement-clamp" | "keep" | "replace"; }; stencilBack?: { compare: "always" | "equal" | "not-equal"; passOp: "increment-clamp" | "decrement-clamp" | "keep" | "replace"; }; } /** @internal */ export declare const GpuStencilModesToPixi: StencilState[]; /** @internal */ export interface GpuTextureUploader { type: string; upload(source: T, gpuTexture: GPUTexture, gpu: GPU$1): void; } /** @internal */ export declare const gpuUploadBufferImageResource: GpuTextureUploader; /** * A texture source that uses a compressed resource, such as an array of Uint8Arrays. * It is used for compressed textures that can be uploaded to the GPU. * @category rendering * @advanced */ export declare class CompressedSource extends TextureSource { readonly uploadMethodId = "compressed"; constructor(options: TextureSourceOptions); } /** @internal */ export declare const blockDataMap: Record; /** @internal */ export declare const gpuUploadCompressedTextureResource: GpuTextureUploader; /** @internal */ export declare const gpuUploadImageResource: GpuTextureUploader>; /** * A utility type that represents a tuple of length L containing elements of type T. * @category utils * @advanced */ export type ArrayFixed = [ T, ...Array ] & { length: L; }; /** * A dictionary type that maps string keys to values of type T. * @category utils * @advanced */ export type Dict = { [key: string]: T; }; /** * The type of resource used for video textures. * This is typically an HTMLVideoElement. * @category rendering * @advanced */ export type VideoResource = HTMLVideoElement; /** * Options for video sources. 
* @category rendering * @advanced */ export interface VideoSourceOptions extends TextureSourceOptions { /** If true, the video will start loading immediately. */ autoLoad?: boolean; /** If true, the video will start playing as soon as it is loaded. */ autoPlay?: boolean; /** The number of times a second to update the texture from the video. Leave at 0 to update at every render. */ updateFPS?: number; /** If true, the video will be loaded with the `crossorigin` attribute. */ crossorigin?: boolean | string; /** If true, the video will loop when it ends. */ loop?: boolean; /** If true, the video will be muted. */ muted?: boolean; /** If true, the video will play inline. */ playsinline?: boolean; /** If true, the video will be preloaded. */ preload?: boolean; /** The time in milliseconds to wait for the video to preload before timing out. */ preloadTimeoutMs?: number; /** The alpha mode of the video. */ alphaMode?: ALPHA_MODES; } /** * A texture source that uses a video as its resource. * It automatically resizes the texture based on the video dimensions. * It also provides methods to control playback and handle video events. * This class supports automatic loading, playback, and frame updates. * It can also handle cross-origin videos and provides options for looping, muting, and inline playback. * @category rendering * @advanced */ export declare class VideoSource extends TextureSource { static extension: ExtensionMetadata; /** The default options for video sources. */ static defaultOptions: VideoSourceOptions; /** Whether or not the video is ready to play. */ isReady: boolean; /** The upload method for this texture. */ uploadMethodId: string; /** * When set to true will automatically play videos used by this texture once * they are loaded. If false, it will not modify the playing state. * @default true */ protected autoPlay: boolean; /** * `true` to use Ticker.shared to auto update the base texture. 
* @default true */ private _autoUpdate; /** * `true` if the instance is currently connected to Ticker.shared to auto update the base texture. * @default false */ private _isConnectedToTicker; /** * Promise when loading. * @default null */ private _load; private _msToNextUpdate; private _preloadTimeout; /** Callback when completed with load. */ private _resolve; private _reject; private _updateFPS; private _videoFrameRequestCallbackHandle; constructor(options: VideoSourceOptions); /** Update the video frame if the source is not destroyed and meets certain conditions. */ protected updateFrame(): void; /** Callback to update the video frame and potentially request the next frame update. */ private _videoFrameRequestCallback; /** * Checks if the resource has valid dimensions. * @returns {boolean} True if width and height are set, otherwise false. */ get isValid(): boolean; /** * Start preloading the video resource. * @returns {Promise} Handle the validate event */ load(): Promise; /** * Handle video error events. * @param event - The error event */ private _onError; /** * Checks if the underlying source is playing. * @returns True if playing. */ private _isSourcePlaying; /** * Checks if the underlying source is ready for playing. * @returns True if ready. */ private _isSourceReady; /** Runs the update loop when the video is ready to play. */ private _onPlayStart; /** Stops the update loop when a pause event is triggered. */ private _onPlayStop; /** Handles behavior when the video completes seeking to the current playback position. */ private _onSeeked; private _onCanPlay; private _onCanPlayThrough; /** Fired when the video is loaded and ready to play. */ private _mediaReady; /** Cleans up resources and event listeners associated with this texture. */ destroy(): void; /** Should the base texture automatically update itself, set to true by default. 
*/ get autoUpdate(): boolean; set autoUpdate(value: boolean); /** * How many times a second to update the texture from the video. * Leave at 0 to update at every render. * A lower fps can help performance, as updating the texture at 60fps on a 30ps video may not be efficient. */ get updateFPS(): number; set updateFPS(value: number); /** * Configures the updating mechanism based on the current state and settings. * * This method decides between using the browser's native video frame callback or a custom ticker * for updating the video frame. It ensures optimal performance and responsiveness * based on the video's state, playback status, and the desired frames-per-second setting. * * - If `_autoUpdate` is enabled and the video source is playing: * - It will prefer the native video frame callback if available and no specific FPS is set. * - Otherwise, it will use a custom ticker for manual updates. * - If `_autoUpdate` is disabled or the video isn't playing, any active update mechanisms are halted. */ private _configureAutoUpdate; /** * Map of video MIME types that can't be directly derived from file extensions. * @readonly */ static MIME_TYPES: Dict; static test(resource: any): resource is VideoResource; } /** @internal */ export declare const gpuUploadVideoResource: GpuTextureUploader; /** @internal */ export declare function getSupportedGPUCompressedTextureFormats(): Promise; /** * A class which generates mipmaps for a GPUTexture. * Thanks to toji for the original implementation * https://github.com/toji/web-texture-tool/blob/main/src/webgpu-mipmap-generator.js * @category rendering * @ignore */ export declare class GpuMipmapGenerator { device: GPUDevice; sampler: GPUSampler; pipelines: Record; mipmapShaderModule: any; constructor(device: GPUDevice); private _getMipmapPipeline; /** * Generates mipmaps for the given GPUTexture from the data in level 0. * @param {module:External.GPUTexture} texture - Texture to generate mipmaps for. 
* @returns {module:External.GPUTexture} - The originally passed texture */ generateMipmap(texture: GPUTexture): GPUTexture; } interface AdvancedBlendInstruction extends Instruction { renderPipeId: "blendMode"; blendMode: BLEND_MODES; activeBlend: Renderable[]; } /** * This Pipe handles the blend mode switching of the renderer. * It will insert instructions into the {@link InstructionSet} to switch the blend mode according to the * blend modes of the scene graph. * * This pipe is were wwe handle Advanced blend modes. Advanced blend modes essentially wrap the renderables * in a filter that applies the blend mode. * * You only need to use this class if you are building your own render instruction set rather than letting PixiJS build * the instruction set for you by traversing the scene graph * @category rendering * @internal */ export declare class BlendModePipe implements InstructionPipe { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLPipes, ExtensionType.WebGPUPipes, ExtensionType.CanvasPipes ]; readonly name: "blendMode"; }; private _renderer; private _renderableList; private _activeBlendMode; private _isAdvanced; private _filterHash; constructor(renderer: Renderer); prerender(): void; /** * This ensures that a blendMode switch is added to the instruction set if the blend mode has changed. 
* @param renderable - The renderable we are adding to the instruction set * @param blendMode - The blend mode of the renderable * @param instructionSet - The instruction set we are adding to */ setBlendMode(renderable: Renderable, blendMode: BLEND_MODES, instructionSet: InstructionSet): void; private _beginAdvancedBlendMode; private _endAdvancedBlendMode; /** * called when the instruction build process is starting this will reset internally to the default blend mode * @internal */ buildStart(): void; /** * called when the instruction build process is finished, ensuring that if there is an advanced blend mode * active, we add the final render instructions added to the instruction set * @param instructionSet - The instruction set we are adding to * @internal */ buildEnd(instructionSet: InstructionSet): void; /** @internal */ destroy(): void; } /** * Copies from one buffer to another. * This is an optimised function that will use `Float64Array` window. * This means it can copy twice as fast! * @param sourceBuffer - the array buffer to copy from * @param destinationBuffer - the array buffer to copy to * @private */ export declare function fastCopy(sourceBuffer: ArrayBuffer, destinationBuffer: ArrayBuffer): void; declare const imageTypes: { png: string; jpg: string; webp: string; }; type Formats = keyof typeof imageTypes; /** * Options for creating an image from a renderer. * Controls the output format and quality of extracted images. 
* @example * ```ts * // Extract as PNG (default) * const pngImage = await renderer.extract.image({ * target: sprite, * format: 'png' * }); * * // Extract as JPEG with quality setting * const jpgImage = await renderer.extract.image({ * target: sprite, * format: 'jpg', * quality: 0.8 * }); * * // Extract as WebP for better compression * const webpImage = await renderer.extract.image({ * target: sprite, * format: 'webp', * quality: 0.9 * }); * ``` * @category rendering * @advanced */ export interface ImageOptions { /** * The format of the extracted image. * - 'png': Lossless format, best for images with text or sharp edges * - 'jpg': Lossy format, smaller file size, good for photos * - 'webp': Modern format with better compression * @example * ```ts * // Extract as PNG * const pngImage = await renderer.extract.image({ * target: sprite, * format: 'png' * }); * // Extract as JPEG * const jpgImage = await renderer.extract.image({ * target: sprite, * format: 'jpg', * }); * ``` * @default 'png' */ format?: Formats; /** * The quality of the extracted image, between 0 and 1. * Only applies to lossy formats (jpg, webp). * - 1: Maximum quality * - 0: Maximum compression * @example * ```ts * // Extract as JPEG with 80% quality * const jpgImage = await renderer.extract.image({ * target: sprite, * format: 'jpg', * quality: 0.8 * }); * // Extract as WebP with 90% quality * const webpImage = await renderer.extract.image({ * target: sprite, * format: 'webp', * quality: 0.9 * }); * ``` * @default 1 */ quality?: number; } /** * Options for extracting content from a renderer. * These options control how content is extracted and processed from the renderer. 
* @example * ```ts * // Basic extraction * const pixels = renderer.extract.pixels({ * target: sprite, * }); * * // Extract with custom region and resolution * const canvas = renderer.extract.canvas({ * target: container, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2, * }); * * // Extract with background color and anti-aliasing * const image = await renderer.extract.image({ * target: graphics, * clearColor: '#ff0000', * antialias: true * }); * ``` * @category rendering * @advanced */ export interface BaseExtractOptions { /** * The target to extract. Can be a Container or Texture. * @example * ```ts * // Extract from a sprite * const sprite = new Sprite(texture); * renderer.extract.pixels({ target: sprite }); * * // Extract from a texture directly * renderer.extract.pixels({ target: texture }); * ``` */ target: Container | Texture; /** * The region of the target to extract. If not specified, extracts the entire target. * @example * ```ts * // Extract a specific region * renderer.extract.canvas({ * target: sprite, * frame: new Rectangle(10, 10, 100, 100) * }); * ``` */ frame?: Rectangle; /** * The resolution of the extracted content. Higher values create sharper images. * @default 1 * @example * ```ts * // Extract at 2x resolution for retina displays * renderer.extract.image({ * target: sprite, * resolution: 2 * }); * ``` */ resolution?: number; /** * The color used to clear the extracted content before rendering. * Can be a hex number, string, or array of numbers. * @example * ```ts * // Clear with red background * renderer.extract.canvas({ * target: sprite, * clearColor: '#ff0000' * }); * * // Clear with semi-transparent black * renderer.extract.canvas({ * target: sprite, * clearColor: [0, 0, 0, 0.5] * }); * ``` */ clearColor?: ColorSource; /** * Whether to enable anti-aliasing during extraction. * Improves quality but may affect performance. 
* @default false * @example * ```ts * // Enable anti-aliasing for smoother edges * renderer.extract.image({ * target: graphics, * antialias: true * }); * ``` */ antialias?: boolean; } /** * Options for extracting an HTMLImage from the renderer. * Combines base extraction options with image-specific settings. * @example * ```ts * // Basic PNG extraction * const image = await renderer.extract.image({ * target: sprite, * format: 'png' * }); * * // High-quality JPEG with custom region * const image = await renderer.extract.image({ * target: container, * format: 'jpg', * quality: 0.9, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2 * }); * * // WebP with background and anti-aliasing * const image = await renderer.extract.image({ * target: graphics, * format: 'webp', * quality: 0.8, * clearColor: '#ff0000', * antialias: true * }); * ``` * * Combines all options from: * - {@link BaseExtractOptions} for basic extraction settings * - {@link ImageOptions} for image format and quality settings * * Common use cases: * - Capturing game screenshots * - Saving rendered content * - Creating image thumbnails * - Exporting canvas content * @see {@link ExtractSystem.image} For the method that uses these options * @see {@link ExtractSystem.base64} For base64 encoding * @category rendering * @advanced * @interface */ export type ExtractImageOptions = BaseExtractOptions & ImageOptions; /** * Options for extracting and downloading content from a renderer. * Combines base extraction options with download-specific settings. 
* @example * ```ts * // Basic download with default filename * renderer.extract.download({ * target: sprite * }); * * // Download with custom filename and region * renderer.extract.download({ * target: container, * filename: 'screenshot.png', * frame: new Rectangle(0, 0, 100, 100) * }); * * // Download with high resolution and background * renderer.extract.download({ * target: stage, * filename: 'hd-capture.png', * resolution: 2, * clearColor: '#ff0000' * }); * * // Download with anti-aliasing * renderer.extract.download({ * target: graphics, * filename: 'smooth.png', * antialias: true * }); * ``` * * Combines all options from: * - {@link BaseExtractOptions} for basic extraction settings * - Additional download-specific options * * Common use cases: * - Saving game screenshots * - Exporting rendered content * - Creating downloadable assets * - Saving canvas state * @see {@link ExtractSystem.download} For the method that uses these options * @see {@link ExtractSystem.image} For creating images without download * @category rendering * @advanced * @interface */ export type ExtractDownloadOptions = BaseExtractOptions & { /** * The filename to use when downloading the content. * Should include the desired file extension (e.g., .png). * @default 'image.png' * @example * ```ts * renderer.extract.download({ * target: sprite, * filename: 'my-screenshot.png' * }); * ``` */ filename: string; }; /** * Options for extracting content from a renderer. Represents a union of all possible extraction option types. * Used by various extraction methods to support different output formats and configurations. 
* @example * ```ts * // Basic canvas extraction * const canvas = renderer.extract.canvas({ * target: sprite * }); * * // Image extraction with format * const image = await renderer.extract.image({ * target: sprite, * format: 'png', * quality: 1 * }); * * // Download with filename * renderer.extract.download({ * target: sprite, * filename: 'screenshot.png' * }); * * // Advanced extraction with multiple options * const image = await renderer.extract.image({ * target: container, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2, * clearColor: '#ff0000', * antialias: true, * format: 'webp', * quality: 0.8 * }); * ``` * * Supports three types of options: * - {@link BaseExtractOptions} - Basic extraction settings * - {@link ExtractImageOptions} - Image-specific settings with format and quality * - {@link ExtractDownloadOptions} - Download settings with filename * * Common use cases: * - Extracting raw pixels * - Creating canvas elements * - Generating downloadable images * - Taking screenshots * - Creating thumbnails * @see {@link ExtractSystem.canvas} For canvas extraction * @see {@link ExtractSystem.image} For image extraction * @see {@link ExtractSystem.download} For downloading content * @category rendering * @advanced */ export type ExtractOptions = BaseExtractOptions | ExtractImageOptions | ExtractDownloadOptions; /** * System for exporting content from a renderer. It provides methods to extract content as images, * canvases, or raw pixel data. Available through `renderer.extract`. 
* @example * ```ts * import { Application, Graphics } from 'pixi.js'; * * // Create a new application * const app = new Application(); * await app.init(); * * // Draw something to extract * const graphics = new Graphics() * .circle(0, 0, 50) * .fill(0xFF0000); * * // Basic extraction examples * const image = await app.renderer.extract.image(graphics); // As IImage (HTMLImageElement) * const canvas = app.renderer.extract.canvas(graphics); // As Canvas * const pixels = app.renderer.extract.pixels(graphics); // As pixel data * const base64 = await app.renderer.extract.base64(graphics); // As base64 string * * // Advanced extraction with options * const customImage = await app.renderer.extract.image({ * target: graphics, * format: 'png', * resolution: 2, * frame: new Rectangle(0, 0, 100, 100), * clearColor: '#00000000' * }); * * // Download content * app.renderer.extract.download({ * target: graphics, * filename: 'my-image.png' * }); * * // Debug visualization * app.renderer.extract.log(graphics); * ``` * * Features: * - Extract as various formats (PNG, JPEG, WebP) * - Control output quality and resolution * - Extract specific regions * - Download extracted content * - Debug visualization * * Common Use Cases: * - Creating thumbnails * - Saving game screenshots * - Processing visual content * - Debugging renders * - Creating textures from rendered content * * Performance Considerations: * - Extraction operations are relatively expensive * - Consider caching results for frequently used content * - Be mindful of resolution and format choices * - Large extractions may impact performance * @category rendering * @standard */ export declare class ExtractSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "extract"; }; /** * Default options for image extraction. 
* @example * ```ts * // Customize default options * ExtractSystem.defaultImageOptions.format = 'webp'; * ExtractSystem.defaultImageOptions.quality = 0.8; * * // Use defaults * const image = await renderer.extract.image(sprite); * ``` */ static defaultImageOptions: ImageOptions; private _renderer; /** @param renderer - The renderer this System works for. */ constructor(renderer: Renderer); private _normalizeOptions; /** * Creates an IImage from a display object or texture. * @param options - Options for creating the image, or the target to extract * @returns Promise that resolves with the generated IImage * @example * ```ts * // Basic usage with a sprite * const sprite = new Sprite(texture); * const image = await renderer.extract.image(sprite); * document.body.appendChild(image); * * // Advanced usage with options * const image = await renderer.extract.image({ * target: container, * format: 'webp', * quality: 0.8, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2, * clearColor: '#ff0000', * antialias: true * }); * * // Extract directly from a texture * const texture = Texture.from('myTexture.png'); * const image = await renderer.extract.image(texture); * ``` * @see {@link ExtractImageOptions} For detailed options * @see {@link ExtractSystem.base64} For base64 string output * @see {@link ExtractSystem.canvas} For canvas output * @see {@link ImageLike} For the image interface * @category rendering */ image(options: ExtractImageOptions | Container | Texture): Promise; /** * Converts the target into a base64 encoded string. * * This method works by first creating * a canvas using `Extract.canvas` and then converting it to a base64 string. 
* @param options - The options for creating the base64 string, or the target to extract * @returns Promise that resolves with the base64 encoded string * @example * ```ts * // Basic usage with a sprite * const sprite = new Sprite(texture); * const base64 = await renderer.extract.base64(sprite); * console.log(base64); // data:image/png;base64,... * * // Advanced usage with options * const base64 = await renderer.extract.base64({ * target: container, * format: 'webp', * quality: 0.8, * frame: new Rectangle(0, 0, 100, 100), * resolution: 2 * }); * ``` * @throws Will throw an error if the platform doesn't support any of: * - ICanvas.toDataURL * - ICanvas.toBlob * - ICanvas.convertToBlob * @see {@link ExtractImageOptions} For detailed options * @see {@link ExtractSystem.canvas} For canvas output * @see {@link ExtractSystem.image} For HTMLImage output * @category rendering */ base64(options: ExtractImageOptions | Container | Texture): Promise; /** * Creates a Canvas element, renders the target to it and returns it. * This method is useful for creating static images or when you need direct canvas access. 
* @param options - The options for creating the canvas, or the target to extract * @returns A Canvas element with the texture rendered on * @example * ```ts * // Basic canvas extraction from a sprite * const sprite = new Sprite(texture); * const canvas = renderer.extract.canvas(sprite); * document.body.appendChild(canvas); * * // Extract with custom region * const canvas = renderer.extract.canvas({ * target: container, * frame: new Rectangle(0, 0, 100, 100) * }); * * // Extract with high resolution * const canvas = renderer.extract.canvas({ * target: sprite, * resolution: 2, * clearColor: '#ff0000' * }); * * // Extract directly from a texture * const texture = Texture.from('myTexture.png'); * const canvas = renderer.extract.canvas(texture); * * // Extract with anti-aliasing * const canvas = renderer.extract.canvas({ * target: graphics, * antialias: true * }); * ``` * @see {@link ExtractOptions} For detailed options * @see {@link ExtractSystem.image} For HTMLImage output * @see {@link ExtractSystem.pixels} For raw pixel data * @category rendering */ canvas(options: ExtractOptions | Container | Texture): ICanvas; /** * Returns a one-dimensional array containing the pixel data of the entire texture in RGBA order, * with integer values between 0 and 255 (inclusive). 
[!NOTE]
* > [!NOTE] The returned texture should be destroyed when no longer needed * @param options - The options for creating the texture, or the target to extract * @returns A new texture containing the extracted content * @example * ```ts * // Basic texture extraction from a sprite * const sprite = new Sprite(texture); * const extractedTexture = renderer.extract.texture(sprite); * * // Extract with custom region * const regionTexture = renderer.extract.texture({ * target: container, * frame: new Rectangle(0, 0, 100, 100) * }); * * // Extract with high resolution * const hiResTexture = renderer.extract.texture({ * target: sprite, * resolution: 2, * clearColor: '#ff0000' * }); * * // Create a new sprite from extracted texture * const newSprite = new Sprite( * renderer.extract.texture({ * target: graphics, * antialias: true * }) * ); * * // Clean up when done * extractedTexture.destroy(true); * ``` * @see {@link ExtractOptions} For detailed options * @see {@link Texture} For texture management * @see {@link GenerateTextureSystem} For texture generation * @category rendering */ texture(options: ExtractOptions | Container | Texture): Texture; /** * Extracts and downloads content from the renderer as an image file. * This is a convenient way to save screenshots or export rendered content. 
* > [!NOTE] The download will use PNG format regardless of the filename extension * @param options - The options for downloading and extracting the image, or the target to extract * @example * ```ts * // Basic download with default filename * const sprite = new Sprite(texture); * renderer.extract.download(sprite); // Downloads as 'image.png' * * // Download with custom filename * renderer.extract.download({ * target: sprite, * filename: 'screenshot.png' * }); * * // Download with custom region * renderer.extract.download({ * target: container, * filename: 'region.png', * frame: new Rectangle(0, 0, 100, 100) * }); * * // Download with high resolution and background * renderer.extract.download({ * target: stage, * filename: 'hd-screenshot.png', * resolution: 2, * clearColor: '#ff0000' * }); * * // Download with anti-aliasing * renderer.extract.download({ * target: graphics, * filename: 'smooth.png', * antialias: true * }); * ``` * @see {@link ExtractDownloadOptions} For detailed options * @see {@link ExtractSystem.image} For creating images without download * @see {@link ExtractSystem.canvas} For canvas output * @category rendering */ download(options: ExtractDownloadOptions | Container | Texture): void; /** * Logs the target to the console as an image. This is a useful way to debug what's happening in the renderer. * The image will be displayed in the browser's console using CSS background images. 
* @param options - The options for logging the image, or the target to log * @param options.width - The width of the logged image preview in the console (in pixels) * @example * ```ts * // Basic usage * const sprite = new Sprite(texture); * renderer.extract.log(sprite); * ``` * @see {@link ExtractSystem.canvas} For getting raw canvas output * @see {@link ExtractSystem.pixels} For raw pixel data * @category rendering * @advanced */ log(options: (ExtractOptions & { width?: number; }) | Container | Texture): void; destroy(): void; } /** * Takes a vertices array and a matrix and transforms the vertices based on the matrix. * this out put is written to the uvs array * @param vertices - the vertices to calculate uvs from * @param verticesStride - the stride of the vertice * @param verticesOffset - the offset of the vertices * @param uvs - the uvs to fill * @param uvsOffset - the offset of the uvs * @param uvsStride - the stride of the uvs * @param size - the size of the vertices * @param matrix - the matrix to apply to the uvs * @internal */ export declare function buildUvs(vertices: number[], verticesStride: number, verticesOffset: number, uvs: number[], uvsOffset: number, uvsStride: number, size: number, matrix?: Matrix): void; /** * @param uvs * @param uvsOffset * @param uvsStride * @param size * @internal */ export declare function buildSimpleUvs(uvs: number[], uvsOffset: number, uvsStride: number, size: number): void; /** * Converts something into a buffer. If it is already a buffer it will pass it through * if it is a number array it will convert it to a float32 array before being passed into a buffer * the buffer will be created with the correct usage flags for geometry attributes * @param buffer - number array * @param index - is this an index buffer? 
* @returns a buffer * @category rendering * @internal */ export declare function ensureIsBuffer(buffer: Buffer$1 | TypedArray | number[], index: boolean): Buffer$1; /** * @param format * @internal */ export declare function getAttributeInfoFromFormat(format: VertexFormat): { size: number; stride: number; normalised: boolean; }; /** * Gets the 2D bounds of a geometry, based on a specific attribute. * @param geometry - Geometry to to measure * @param attributeId - AttributeId that contains the x,y data * @param bounds - Bounds to store the result in * @returns the bounds * @internal */ export declare function getGeometryBounds(geometry: Geometry, attributeId: string, bounds: Bounds): Bounds; /** * Transforms the vertices in an array with the given matrix. * @param vertices - the vertices to transform * @param m - the matrix to apply to the vertices * @param offset - the offset of the vertices (defaults to 0) * @param stride - the stride of the vertices (defaults to 2) * @param size - the size of the vertices (defaults to vertices.length / stride - offset) * @category rendering * @internal */ export declare function transformVertices(vertices: number[], m: Matrix, offset?: number, stride?: number, size?: number): void; /** * Type definition for the global uniforms used in the renderer. * This includes projection matrix, world transform matrix, world color, and resolution. * @category rendering * @advanced */ export type GlobalUniformGroup = UniformGroup<{ uProjectionMatrix: { value: Matrix; type: "mat3x3"; }; uWorldTransformMatrix: { value: Matrix; type: "mat3x3"; }; uWorldColorAlpha: { value: Float32Array; type: "vec4"; }; uResolution: { value: number[]; type: "vec2"; }; }>; /** * Options for the global uniforms system. * This includes size, projection matrix, world transform matrix, world color, and offset. 
* @category rendering * @advanced */ export interface GlobalUniformOptions { size?: number[]; projectionMatrix?: Matrix; worldTransformMatrix?: Matrix; worldColor?: number; offset?: PointData; } /** * Data structure for the global uniforms used in the renderer. * This includes the projection matrix, world transform matrix, world color, resolution, and bind group. * @category rendering * @advanced */ export interface GlobalUniformData { projectionMatrix: Matrix; worldTransformMatrix: Matrix; worldColor: number; resolution: number[]; offset: PointData; bindGroup: BindGroup; } /** @internal */ export interface GlobalUniformRenderer { renderTarget: GlRenderTargetSystem | GpuRenderTargetSystem; renderPipes: Renderer["renderPipes"]; ubo: UboSystem; type: RendererType; } /** * System plugin to the renderer to manage global uniforms for the renderer. * @category rendering * @advanced */ export declare class GlobalUniformSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "globalUniforms"; }; private readonly _renderer; private _stackIndex; private _globalUniformDataStack; private readonly _uniformsPool; private readonly _activeUniforms; private readonly _bindGroupPool; private readonly _activeBindGroups; private _currentGlobalUniformData; constructor(renderer: GlobalUniformRenderer); reset(): void; start(options: GlobalUniformOptions): void; bind({ size, projectionMatrix, worldTransformMatrix, worldColor, offset, }: GlobalUniformOptions): void; push(options: GlobalUniformOptions): void; pop(): void; get bindGroup(): BindGroup; get globalUniformData(): GlobalUniformData; get uniformGroup(): UniformGroup; private _createUniforms; destroy(): void; } /** * Checks if the render target is viewable on the screen * Basically, is it a canvas element and is that canvas element in the DOM * @param renderTarget - the render target to check * @returns 
true if the render target is viewable on the screen * @internal */ export declare function isRenderingToScreen(renderTarget: RenderTarget): boolean; /** * The SchedulerSystem manages scheduled tasks with specific intervals. * @category rendering * @advanced */ export declare class SchedulerSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "scheduler"; readonly priority: 0; }; private readonly _tasks; /** a small off set to apply to the repeat schedules. This is just to make sure they run at slightly different times */ private _offset; /** Initializes the scheduler system and starts the ticker. */ init(): void; /** * Schedules a repeating task. * @param func - The function to execute. * @param duration - The interval duration in milliseconds. * @param useOffset - this will spread out tasks so that they do not all run at the same time * @returns The unique identifier for the scheduled task. */ repeat(func: (elapsed: number) => void, duration: number, useOffset?: boolean): number; /** * Cancels a scheduled task. * @param id - The unique identifier of the task to cancel. */ cancel(id: number): void; /** * Updates and executes the scheduled tasks. * @private */ private _update; /** * Destroys the scheduler system and removes all tasks. * @internal */ destroy(): void; } /** @internal */ export declare enum ShaderStage { VERTEX = 1, FRAGMENT = 2, COMPUTE = 4 } /** * System plugin to the renderer to manage the shaders. 
* @category rendering * @advanced */ export interface ShaderSystem extends System { /** the maximum number of textures that can be bound to a shader */ readonly maxTextures: number; } /** * @param uboElements * @param parserCode * @param arrayGenerationFunction * @param singleSettersMap * @internal */ export declare function createUboSyncFunction(uboElements: UboElement[], parserCode: "uboWgsl" | "uboStd40", arrayGenerationFunction: (uboElement: UboElement, offsetToAdd: number) => string, singleSettersMap: Record): UniformsSyncCallback; /** * @param {string} type - Type of value * @param {number} size * @private */ export declare function getDefaultUniformValue(type: string, size: number): number | Float32Array | Int32Array | Uint32Array | boolean | boolean[]; /** @internal */ export declare const uboSyncFunctionsSTD40: Record; /** @internal */ export declare const uboSyncFunctionsWGSL: Record; interface UniformParserDefinition { type: UNIFORM_TYPES; test(data: UniformData): boolean; ubo?: string; uboWgsl?: string; uboStd40?: string; uniform?: string; } /** @internal */ export declare const uniformParsers: UniformParserDefinition[]; /** * Options for the startup system. * @property {boolean} [hello=false] - Whether to log the version and type information of renderer to console. * @category rendering * @advanced */ export interface HelloSystemOptions { /** * Whether to log the version and type information of renderer to console. * @default false */ hello: boolean; } /** * A simple system responsible for initiating the renderer. * @category rendering * @advanced */ export declare class HelloSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem, ExtensionType.CanvasSystem ]; readonly name: "hello"; readonly priority: -2; }; /** The default options for the system. 
*/ static defaultOptions: HelloSystemOptions; private readonly _renderer; constructor(renderer: Renderer); /** * It all starts here! This initiates every system, passing in the options for any system by name. * @param options - the config for the renderer and all its systems */ init(options: HelloSystemOptions): void; } /** * Adjusts a blend mode for the current alpha mode. Returns the blend mode that works with that format. * eg 'normal' blend mode will return 'normal-npm' when rendering with premultiplied alpha. * and 'normal' if the texture is already premultiplied (the default) * @param blendMode - The blend mode to get the adjusted blend mode for. * @param textureSource - The texture to test the format of. * @returns - the blend mode that should be used to render this texture correctly based on its alphaMode * @internal */ export declare function getAdjustedBlendModeBlend(blendMode: BLEND_MODES, textureSource: TextureSource): BLEND_MODES; /** * A utility type that represents a canvas and its rendering context. * @category rendering * @internal */ export interface CanvasAndContext { /** The canvas element. */ canvas: ICanvas; /** The rendering context of the canvas. */ context: ICanvasRenderingContext2D; } /** * CanvasPool is a utility class that manages a pool of reusable canvas elements * @category rendering * @internal */ export declare class CanvasPoolClass { canvasOptions: ICanvasRenderingContext2DSettings; /** * Allow renderTextures of the same size as screen, not just pow2 * * Automatically sets to true after `setScreenSize` * @default false */ enableFullScreen: boolean; private _canvasPool; constructor(canvasOptions?: ICanvasRenderingContext2DSettings); /** * Creates texture with params that were specified in pool constructor. * @param pixelWidth - Width of texture in pixels. * @param pixelHeight - Height of texture in pixels. 
*/ private _createCanvasAndContext; /** * Gets a Power-of-Two render texture or fullScreen texture * @param minWidth - The minimum width of the render texture. * @param minHeight - The minimum height of the render texture. * @param resolution - The resolution of the render texture. * @returns The new render texture. */ getOptimalCanvasAndContext(minWidth: number, minHeight: number, resolution?: number): CanvasAndContext; /** * Place a render texture back into the pool. * @param canvasAndContext */ returnCanvasAndContext(canvasAndContext: CanvasAndContext): void; clear(): void; } /** * CanvasPool is a utility class that manages a pool of reusable canvas elements * @category rendering * @internal */ export declare const CanvasPool: CanvasPoolClass; /** * Options for the {@link RenderableGCSystem}. * @category rendering * @property {boolean} [renderableGCActive=true] - If set to true, this will enable the garbage collector on the renderables. * @property {number} [renderableGCAMaxIdle=60000] - * The maximum idle frames before a texture is destroyed by garbage collection. * @property {number} [renderableGCCheckCountMax=60000] - time between two garbage collections. * @advanced */ export interface RenderableGCSystemOptions { /** * If set to true, this will enable the garbage collector on the GPU. * @default true */ renderableGCActive: boolean; /** * The maximum idle frames before a texture is destroyed by garbage collection. * @default 60 * 60 */ renderableGCMaxUnusedTime: number; /** * Frames between two garbage collections. * @default 600 */ renderableGCFrequency: number; } /** * The RenderableGCSystem is responsible for cleaning up GPU resources that are no longer being used. * * When rendering objects like sprites, text, etc - GPU resources are created and managed by the renderer. * If these objects are no longer needed but not properly destroyed (via sprite.destroy()), their GPU resources * would normally leak. 
This system prevents that by automatically cleaning up unused GPU resources. * * Key features: * - Runs every 30 seconds by default to check for unused resources * - Cleans up resources not rendered for over 1 minute * - Works independently of rendering - will clean up even when not actively rendering * - When cleaned up resources are needed again, new GPU objects are quickly assigned from a pool * - Can be disabled with renderableGCActive:false for manual control * * Best practices: * - Always call destroy() explicitly when done with renderables (e.g. sprite.destroy()) * - This system is a safety net, not a replacement for proper cleanup * - Adjust frequency and timeouts via options if needed * @example * ```js * // Sprite created but reference lost without destroy * let sprite = new Sprite(texture); * * // internally the renderer will assign a resource to the sprite * renderer.render(sprite); * * sprite = null; // Reference lost but GPU resources still exist * * // After 1 minute of not being rendered: * // - RenderableGC will clean up the sprite's GPU resources * // - JS garbage collector can then clean up the sprite itself * ``` * @category rendering * @advanced */ export declare class RenderableGCSystem implements System { /** * Extension metadata for registering this system with the renderer. * @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "renderableGC"; readonly priority: 0; }; /** * Default configuration options for the garbage collection system. * These can be overridden when initializing the renderer. 
*/ static defaultOptions: RenderableGCSystemOptions; /** Maximum time in ms a resource can be unused before being garbage collected */ maxUnusedTime: number; /** Reference to the renderer this system belongs to */ private _renderer; /** Array of renderables being tracked for garbage collection */ private readonly _managedRenderables; /** ID of the main GC scheduler handler */ private _handler; /** How frequently GC runs in ms */ private _frequency; /** Current timestamp used for age calculations */ private _now; /** Array of hash objects being tracked for cleanup */ private readonly _managedHashes; /** ID of the hash cleanup scheduler handler */ private _hashHandler; /** Array of arrays being tracked for cleanup */ private readonly _managedArrays; /** ID of the array cleanup scheduler handler */ private _arrayHandler; /** * Creates a new RenderableGCSystem instance. * @param renderer - The renderer this garbage collection system works for */ constructor(renderer: Renderer); /** * Initializes the garbage collection system with the provided options. * @param options - Configuration options for the renderer */ init(options: RenderableGCSystemOptions): void; /** * Gets whether the garbage collection system is currently enabled. * @returns True if GC is enabled, false otherwise */ get enabled(): boolean; /** * Enables or disables the garbage collection system. * When enabled, schedules periodic cleanup of resources. * When disabled, cancels all scheduled cleanups. */ set enabled(value: boolean); /** * Adds a hash table to be managed by the garbage collector. * @param context - The object containing the hash table * @param hash - The property name of the hash table */ addManagedHash(context: T, hash: string): void; /** * Adds an array to be managed by the garbage collector. 
* @param context - The object containing the array * @param hash - The property name of the array */ addManagedArray(context: T, hash: string): void; /** * Updates the GC timestamp and tracking before rendering. * @param options - The render options * @param options.container - The container to render */ prerender({ container }: RenderOptions): void; /** * Starts tracking a renderable for garbage collection. * @param renderable - The renderable to track */ addRenderable(renderable: Renderable): void; /** * Performs garbage collection by cleaning up unused renderables. * Removes renderables that haven't been used for longer than maxUnusedTime. */ run(): void; /** Cleans up the garbage collection system. Disables GC and removes all tracked resources. */ destroy(): void; /** * Removes a renderable from being tracked when it's destroyed. * @param renderable - The renderable to stop tracking */ private _removeRenderable; /** * Updates the GC tick counter for a render group and its children. * @param renderGroup - The render group to update * @param gcTick - The new tick value */ private _updateInstructionGCTick; } /** * Options for the {@link TextureGCSystem}. * @category rendering * @advanced */ export interface TextureGCSystemOptions { /** * If set to true, this will enable the garbage collector on the GPU. * @default true */ textureGCActive: boolean; /** * @deprecated since 8.3.0 * @see {@link TextureGCSystemOptions.textureGCMaxIdle} */ textureGCAMaxIdle: number; /** * The maximum idle frames before a texture is destroyed by garbage collection. * @default 60 * 60 */ textureGCMaxIdle: number; /** * Frames between two garbage collections. * @default 600 */ textureGCCheckCountMax: number; } /** * System plugin to the renderer to manage texture garbage collection on the GPU, * ensuring that it does not get clogged up with textures that are no longer being used. 
* @category rendering * @advanced */ export declare class TextureGCSystem implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "textureGC"; }; /** default options for the TextureGCSystem */ static defaultOptions: TextureGCSystemOptions; /** * Frame count since started. * @readonly */ count: number; /** * Frame count since last garbage collection. * @readonly */ checkCount: number; /** * Maximum idle frames before a texture is destroyed by garbage collection. * @see TextureGCSystem.defaultMaxIdle */ maxIdle: number; /** * Frames between two garbage collections. * @see TextureGCSystem.defaultCheckCountMax */ checkCountMax: number; /** * Current garbage collection mode. * @see TextureGCSystem.defaultMode */ active: boolean; private _renderer; /** @param renderer - The renderer this System works for. */ constructor(renderer: Renderer); init(options: TextureGCSystemOptions): void; /** * Checks to see when the last time a texture was used. * If the texture has not been used for a specified amount of time, it will be removed from the GPU. */ protected postrender(): void; /** * Checks to see when the last time a texture was used. * If the texture has not been used for a specified amount of time, it will be removed from the GPU. */ run(): void; destroy(): void; } /** * Texture pool, used by FilterSystem and plugins. 
* * Stores collection of temporary pow2 or screen-sized renderTextures * * If you use custom RenderTexturePool for your filters, you can use methods * `getFilterTexture` and `returnFilterTexture` same as in default pool * @category rendering * @advanced */ export declare class TexturePoolClass { /** The default options for texture pool */ textureOptions: TextureSourceOptions; /** The default texture style for the pool */ textureStyle: TextureStyle; /** * Allow renderTextures of the same size as screen, not just pow2 * * Automatically sets to true after `setScreenSize` * @default false */ enableFullScreen: boolean; private _texturePool; private _poolKeyHash; /** * @param textureOptions - options that will be passed to BaseRenderTexture constructor * @param {SCALE_MODE} [textureOptions.scaleMode] - See {@link SCALE_MODE} for possible values. */ constructor(textureOptions?: TextureSourceOptions); /** * Creates texture with params that were specified in pool constructor. * @param pixelWidth - Width of texture in pixels. * @param pixelHeight - Height of texture in pixels. * @param antialias */ createTexture(pixelWidth: number, pixelHeight: number, antialias: boolean): Texture; /** * Gets a Power-of-Two render texture or fullScreen texture * @param frameWidth - The minimum width of the render texture. * @param frameHeight - The minimum height of the render texture. * @param resolution - The resolution of the render texture. * @param antialias * @returns The new render texture. */ getOptimalTexture(frameWidth: number, frameHeight: number, resolution: number, antialias: boolean): Texture; /** * Gets extra texture of the same size as input renderTexture * @param texture - The texture to check what size it is. * @param antialias - Whether to use antialias. * @returns A texture that is a power of two */ getSameSizeTexture(texture: Texture, antialias?: boolean): Texture>; /** * Place a render texture back into the pool. 
Optionally reset the style of the texture to the default texture style. * useful if you modified the style of the texture after getting it from the pool. * @param renderTexture - The renderTexture to free * @param resetStyle - Whether to reset the style of the texture to the default texture style */ returnTexture(renderTexture: Texture, resetStyle?: boolean): void; /** * Clears the pool. * @param destroyTextures - Destroy all stored textures. */ clear(destroyTextures?: boolean): void; } /** * The default texture pool instance. * @category rendering * @advanced */ export declare const TexturePool: TexturePoolClass; /** * Stores a texture's frame in UV coordinates, in * which everything lies in the rectangle `[(0,0), (1,0), * (1,1), (0,1)]`. * * | Corner | Coordinates | * |--------------|-------------| * | Top-Left | `(x0,y0)` | * | Top-Right | `(x1,y1)` | * | Bottom-Right | `(x2,y2)` | * | Bottom-Left | `(x3,y3)` | * @protected * @category rendering * @advanced */ export declare class TextureUvs { /** X-component of top-left corner `(x0,y0)`. */ x0: number; /** Y-component of top-left corner `(x0,y0)`. */ y0: number; /** X-component of top-right corner `(x1,y1)`. */ x1: number; /** Y-component of top-right corner `(x1,y1)`. */ y1: number; /** X-component of bottom-right corner `(x2,y2)`. */ x2: number; /** Y-component of bottom-right corner `(x2,y2)`. */ y2: number; /** X-component of bottom-left corner `(x3,y3)`. */ x3: number; /** Y-component of bottom-left corner `(x3,y3)`. */ y3: number; uvsFloat32: Float32Array; constructor(); /** * Sets the texture Uvs based on the given frame information. 
* @protected * @param frame - The frame of the texture * @param baseFrame - The base frame of the texture * @param rotate - Rotation of frame, see {@link groupD8} */ set(frame: Rectangle, baseFrame: Size, rotate: number): void; toString(): string; } /** * @param canvas * @param options * @internal */ export declare function getCanvasTexture(canvas: ICanvas, options?: CanvasSourceOptions): Texture; /** * @param canvas * @internal */ export declare function hasCachedCanvasTexture(canvas: ICanvas): boolean; /** @internal */ export declare function getSupportedCompressedTextureFormats(): Promise; /** @internal */ export declare const nonCompressedFormats: TEXTURE_FORMATS[]; /** @internal */ export declare function getSupportedTextureFormats(): Promise; /** * @param value * @param groupId * @internal */ export declare function createIdFromString(value: string, groupId: string): number; /** * @param fn * @internal */ export declare function parseFunctionBody(fn: (...args: any[]) => any): string; /** @internal */ export interface ViewObserver { onViewUpdate: () => void; } /** * A view is something that is able to be rendered by the renderer. * @category scene * @advanced */ export interface View { /** a unique id for this view */ readonly uid: number; /** whether or not this view should be batched */ batched: boolean; /** * an identifier that is used to identify the type of system that will be used to render this renderable * eg, 'sprite' will use the sprite system (based on the system's name) */ readonly renderPipeId: string; /** this is an int because it is packed directly into an attribute in the shader */ _roundPixels: 0 | 1; /** @private */ _lastUsed: number; /** * Whether or not to round the x/y position of the object. * @type {boolean} */ get roundPixels(): boolean; /** if true, the view will have its position rounded to the nearest whole number */ set roundPixels(value: boolean); /** this is the AABB rectangle bounds of the view in local untransformed space. 
*/ bounds: BoundsData; /** Checks if the point is within the view */ containsPoint: (point: Point) => boolean; } declare const DefaultWebGPUSystems: (typeof BackgroundSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof GenerateTextureSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem | typeof GpuUboSystem | typeof GpuEncoderSystem | typeof GpuDeviceSystem | typeof GpuLimitsSystem | typeof GpuBufferSystem | typeof GpuTextureSystem | typeof GpuRenderTargetSystem | typeof GpuShaderSystem | typeof GpuStateSystem | typeof PipelineSystem | typeof GpuColorMaskSystem | typeof GpuStencilSystem | typeof BindGroupSystem)[]; declare const DefaultWebGPUPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe | typeof GpuUniformBatchPipe)[]; /** * The default WebGPU systems. These are the systems that are added by default to the WebGPURenderer. * @category rendering * @standard * @interface */ export type WebGPUSystems = ExtractSystemTypes & PixiMixins.RendererSystems & PixiMixins.WebGPUSystems; /** * The WebGPU renderer pipes. These are used to render the scene. * @see {@link WebGPURenderer} * @internal */ export type WebGPUPipes = ExtractSystemTypes & PixiMixins.RendererPipes & PixiMixins.WebGPUPipes; /** * Options for WebGPURenderer. * @category rendering * @standard */ export interface WebGPUOptions extends SharedRendererOptions, ExtractRendererOptions, PixiMixins.WebGPUOptions { } export interface WebGPURenderer extends AbstractRenderer, WebGPUSystems { } /** * The WebGPU PixiJS Renderer. This renderer allows you to use the next-generation graphics API, WebGPU. 
* ```ts * // Create a new renderer * const renderer = new WebGPURenderer(); * await renderer.init(); * * // Add the renderer to the stage * document.body.appendChild(renderer.canvas); * * // Create a new stage * const stage = new Container(); * * // Render the stage * renderer.render(stage); * ``` * * You can use {@link autoDetectRenderer} to create a renderer that will automatically detect the best * renderer for the environment. * ```ts * import { autoDetectRenderer } from 'pixi.js'; * // Create a new renderer * const renderer = await autoDetectRenderer(); * ``` * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a WebGPU renderer: * * | WebGPU Core Systems | Systems that are specific to the WebGL renderer | * | ---------------------------------------- | ----------------------------------------------------------------------------- | * | {@link GpuUboSystem} | This manages WebGPU uniform buffer objects feature for shaders | * | {@link GpuEncoderSystem} | This manages the WebGPU command encoder | * | {@link GpuDeviceSystem} | This manages the WebGPU Device and its extensions | * | {@link GpuBufferSystem} | This manages buffers and their GPU resources, keeps everything in sync | * | {@link GpuTextureSystem} | This manages textures and their GPU resources, keeps everything in sync | * | {@link GpuRenderTargetSystem} | This manages what we render too. For example the screen, or another texture | * | {@link GpuShaderSystem} | This manages shaders, programs that run on the GPU to output lovely pixels | * | {@link GpuStateSystem} | This manages the state of the WebGPU Pipelines. eg the various flags that can be set blend modes / depthTesting etc | * | {@link PipelineSystem} | This manages the WebGPU pipelines, used for rendering | * | {@link GpuColorMaskSystem} | This manages the color mask. Used for color masking | * | {@link GpuStencilSystem} | This manages the stencil buffer. 
Used primarily for masking | * | {@link BindGroupSystem} | This manages the WebGPU bind groups. this is how data is bound to a shader when rendering | * * The breadth of the API surface provided by the renderer is contained within these systems. * @category rendering * @standard * @property {GpuUboSystem} ubo - UboSystem instance. * @property {GpuEncoderSystem} encoder - EncoderSystem instance. * @property {GpuDeviceSystem} device - DeviceSystem instance. * @property {GpuBufferSystem} buffer - BufferSystem instance. * @property {GpuTextureSystem} texture - TextureSystem instance. * @property {GpuRenderTargetSystem} renderTarget - RenderTargetSystem instance. * @property {GpuShaderSystem} shader - ShaderSystem instance. * @property {GpuStateSystem} state - StateSystem instance. * @property {PipelineSystem} pipeline - PipelineSystem instance. * @property {GpuColorMaskSystem} colorMask - ColorMaskSystem instance. * @property {GpuStencilSystem} stencil - StencilSystem instance. * @property {BindGroupSystem} bindGroup - BindGroupSystem instance. * @extends AbstractRenderer */ export declare class WebGPURenderer extends AbstractRenderer implements WebGPUSystems { /** The WebGPU Device. */ gpu: GPU$1; constructor(); } /** * Options for {@link autoDetectRenderer}. * @category rendering * @advanced */ export interface AutoDetectOptions extends RendererOptions { /** The preferred renderer type. WebGPU is recommended as its generally faster than WebGL. */ preference?: "webgl" | "webgpu"; /** Optional WebGPUOptions to pass only to WebGPU renderer. */ webgpu?: Partial; /** Optional WebGLOptions to pass only to the WebGL renderer */ webgl?: Partial; } /** * Automatically determines the most appropriate renderer for the current environment. * * The function will prioritize the WebGL renderer as it is the most tested safe API to use. * In the near future as WebGPU becomes more stable and ubiquitous, it will be prioritized over WebGL. 
* * The selected renderer's code is then dynamically imported to optimize * performance and minimize the initial bundle size. * * To maximize the benefits of dynamic imports, it's recommended to use a modern bundler * that supports code splitting. This will place the renderer code in a separate chunk, * which is loaded only when needed. * @example * * // create a renderer * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * antialias: true, * }); * * // custom for each renderer * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * webgpu:{ * antialias: true, * backgroundColor: 'red' * }, * webgl:{ * antialias: true, * backgroundColor: 'green' * } * }); * @param options - A partial configuration object based on the `AutoDetectOptions` type. * @returns A Promise that resolves to an instance of the selected renderer. * @category rendering * @standard */ export declare function autoDetectRenderer(options: Partial): Promise; /** * Interface for creating Application plugins. Any plugin that's usable for Application must implement these methods. * * To create a plugin: * 1. Create a class that implements this interface * 2. Add the required static extension property * 3. 
Register the plugin using extensions.add() * @example * ```ts * import { ApplicationPlugin, ExtensionType, extensions } from 'pixi.js'; * * class MyPlugin { * // Required: Declare the extension type * public static extension = ExtensionType.Application; * * // Required: Implement init method * public static init(options: Partial): void { * // Add properties/methods to the Application instance (this) * Object.defineProperty(this, 'myFeature', { * value: () => console.log('My feature!'), * }); * * // Use options if needed * console.log('Plugin initialized with:', options); * } * * // Required: Implement destroy method * public static destroy(): void { * // Clean up any resources * console.log('Plugin destroyed'); * } * } * * // Register the plugin * extensions.add(MyPlugin); * * // Usage in application * const app = new Application(); * await app.init(); * app.myFeature(); // Output: "My feature!" * ``` * > [!IMPORTANT] * > - Plugins are initialized in the order they are added * > - Plugins are destroyed in reverse order * > - The `this` context in both methods refers to the Application instance * @see {@link ExtensionType} For different types of extensions * @see {@link extensions} For the extension registration system * @see {@link ApplicationOptions} For available application options * @category app * @advanced */ export interface ApplicationPlugin { /** * Called when Application is constructed, scoped to Application instance. * Passes in `options` as the only argument, which are Application `init()` options. * @param {object} options - Application options. */ init(options: Partial): void; /** Called when destroying Application, scoped to Application instance. */ destroy(): void; } /** * Application options supplied to the {@link Application#init} method. * These options configure how your PixiJS application behaves. 
* @category app * @standard * @example * ```js * import { Application } from 'pixi.js'; * * const app = new Application(); * * // Initialize with common options * await app.init({ * // Rendering options * width: 800, // Canvas width * height: 600, // Canvas height * backgroundColor: 0x1099bb, // Background color * antialias: true, // Enable antialiasing * resolution: window.devicePixelRatio, // Screen resolution * * // Performance options * autoStart: true, // Auto-starts the render loop * sharedTicker: true, // Use shared ticker for better performance * * // Automatic resize options * resizeTo: window, // Auto-resize to window * autoDensity: true, // Adjust for device pixel ratio * * // Advanced options * preference: 'webgl', // Renderer preference ('webgl' or 'webgpu') * powerPreference: 'high-performance' // GPU power preference * }); * ``` * @see {@link WebGLOptions} For WebGL-specific renderer options * @see {@link WebGPUOptions} For WebGPU-specific renderer options * @see {@link TickerPlugin} For ticker-related options * @see {@link ResizePlugin} For resize-related options */ export interface ApplicationOptions extends AutoDetectOptions, PixiMixins.ApplicationOptions { } export interface Application extends PixiMixins.Application { } /** * Convenience class to create a new PixiJS application. * * The Application class is the main entry point for creating a PixiJS application. It handles the setup of all core * components needed to start rendering and managing your game or interactive experience. 
* * Key features: * - Automatically creates and manages the renderer * - Provides a stage (root container) for your display objects * - Handles canvas creation and management * - Supports plugins for extending functionality * - {@link ResizePlugin} for automatic resizing * - {@link TickerPlugin} for managing frame updates * - {@link CullerPlugin} for culling off-screen objects * @example * ```js * import { Assets, Application, Sprite } from 'pixi.js'; * * // Create a new application * const app = new Application(); * * // Initialize with options * await app.init({ * width: 800, // Canvas width * height: 600, // Canvas height * backgroundColor: 0x1099bb, // Background color * antialias: true, // Enable antialiasing * resolution: 1, // Resolution / device pixel ratio * preference: 'webgl', // or 'webgpu' // Renderer preference * }); * * // Add the canvas to your webpage * document.body.appendChild(app.canvas); * * // Start adding content to your application * const texture = await Assets.load('your-image.png'); * const sprite = new Sprite(texture); * app.stage.addChild(sprite); * ``` * > [!IMPORTANT] From PixiJS v8.0.0, the application must be initialized using the async `init()` method * > rather than passing options to the constructor. * @category app * @standard * @see {@link ApplicationOptions} For all available initialization options * @see {@link Container} For information about the stage container * @see {@link Renderer} For details about the rendering system */ export declare class Application { /** * Collection of installed plugins. * @internal */ static _plugins: ApplicationPlugin[]; /** * The root display container for your application. * All visual elements should be added to this container or its children. 
* @example * ```js * // Create a sprite and add it to the stage * const sprite = Sprite.from('image.png'); * app.stage.addChild(sprite); * * // Create a container for grouping objects * const container = new Container(); * app.stage.addChild(container); * ``` */ stage: Container; /** * The renderer instance that handles all drawing operations. * * Unless specified, it will automatically create a WebGL renderer if available. * If WebGPU is available and the `preference` is set to `webgpu`, it will create a WebGPU renderer. * @example * ```js * // Create a new application * const app = new Application(); * await app.init({ * width: 800, * height: 600, * preference: 'webgl', // or 'webgpu' * }); * * // Access renderer properties * console.log(app.renderer.width, app.renderer.height); * ``` */ renderer: R; /** Create new Application instance */ constructor(); /** @deprecated since 8.0.0 */ constructor(options?: Partial); /** * Initializes the PixiJS application with the specified options. * * This method must be called after creating a new Application instance. * @param options - Configuration options for the application and renderer * @returns A promise that resolves when initialization is complete * @example * ```js * const app = new Application(); * * // Initialize with custom options * await app.init({ * width: 800, * height: 600, * backgroundColor: 0x1099bb, * preference: 'webgl', // or 'webgpu' * }); * ``` */ init(options?: Partial): Promise; /** * Renders the current stage to the screen. * * When using the default setup with {@link TickerPlugin} (enabled by default), you typically don't need to call * this method directly as rendering is handled automatically. * * Only use this method if you've disabled the {@link TickerPlugin} or need custom * render timing control. 
* @example * ```js * // Example 1: Default setup (TickerPlugin handles rendering) * const app = new Application(); * await app.init(); * // No need to call render() - TickerPlugin handles it * * // Example 2: Custom rendering loop (if TickerPlugin is disabled) * const app = new Application(); * await app.init({ autoStart: false }); // Disable automatic rendering * * function animate() { * app.render(); * requestAnimationFrame(animate); * } * animate(); * ``` */ render(): void; /** * Reference to the renderer's canvas element. This is the HTML element * that displays your application's graphics. * @readonly * @type {HTMLCanvasElement} * @example * ```js * // Create a new application * const app = new Application(); * // Initialize the application * await app.init({...}); * // Add canvas to the page * document.body.appendChild(app.canvas); * * // Access the canvas directly * console.log(app.canvas); // HTMLCanvasElement * ``` */ get canvas(): R["canvas"]; /** * Reference to the renderer's canvas element. * @type {HTMLCanvasElement} * @deprecated since 8.0.0 * @see {@link Application#canvas} */ get view(): R["canvas"]; /** * Reference to the renderer's screen rectangle. This represents the visible area of your application. * * It's commonly used for: * - Setting filter areas for full-screen effects * - Defining hit areas for screen-wide interaction * - Determining the visible bounds of your application * @readonly * @example * ```js * // Use as filter area for a full-screen effect * const blurFilter = new BlurFilter(); * sprite.filterArea = app.screen; * * // Use as hit area for screen-wide interaction * const screenSprite = new Sprite(); * screenSprite.hitArea = app.screen; * * // Get screen dimensions * console.log(app.screen.width, app.screen.height); * ``` * @see {@link Rectangle} For all available properties and methods */ get screen(): Rectangle; /** * Destroys the application and all of its resources. 
* * This method should be called when you want to completely * clean up the application and free all associated memory. * @param rendererDestroyOptions - Options for destroying the renderer: * - `false` or `undefined`: Preserves the canvas element (default) * - `true`: Removes the canvas element * - `{ removeView: boolean }`: Object with removeView property to control canvas removal * @param options - Options for destroying the application: * - `false` or `undefined`: Basic cleanup (default) * - `true`: Complete cleanup including children * - Detailed options object: * - `children`: Remove children * - `texture`: Destroy textures * - `textureSource`: Destroy texture sources * - `context`: Destroy WebGL context * @example * ```js * // Basic cleanup * app.destroy(); * * // Remove canvas and do complete cleanup * app.destroy(true, true); * * // Remove canvas with explicit options * app.destroy({ removeView: true }, true); * * // Detailed cleanup with specific options * app.destroy( * { removeView: true }, * { * children: true, * texture: true, * textureSource: true, * context: true * } * ); * ``` * > [!WARNING] After calling destroy, the application instance should no longer be used. * > All properties will be null and further operations will throw errors. */ destroy(rendererDestroyOptions?: RendererDestroyOptions, options?: DestroyOptions): void; } declare global { var __PIXI_APP_INIT__: undefined | ((arg: Application | Renderer, version: string) => void); var __PIXI_RENDERER_INIT__: undefined | ((arg: Application | Renderer, version: string) => void); } /** * Calls global __PIXI_APP_INIT__ hook with the application instance, after the application is initialized. * @category app * @internal */ export declare class ApplicationInitHook { /** @ignore */ static extension: ExtensionMetadata; static init(): void; static destroy(): void; } /** * Calls global __PIXI_RENDERER_INIT__ hook with the renderer instance, after the renderer is initialized. 
* @category rendering * @internal */ export declare class RendererInitHook implements System { /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "initHook"; readonly priority: -10; }; private _renderer; constructor(renderer: Renderer); init(): void; destroy(): void; } /** * Shared systems for the renderer. * @category rendering * @internal */ export declare const SharedSystems: (typeof BackgroundSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof GenerateTextureSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem)[]; /** * Shared render pipes for the renderer. * @category rendering * @internal */ export declare const SharedRenderPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe)[]; /** * Options for the shared systems of a renderer. * @category rendering * @advanced */ export interface SharedRendererOptions extends ExtractRendererOptions, PixiMixins.RendererOptions { /** * Whether to stop PixiJS from dynamically importing default extensions for the renderer. * It is false by default, and means PixiJS will load all the default extensions, based * on the environment e.g browser/webworker. * If you set this to true, then you will need to manually import the systems and extensions you need. * * e.g. 
* ```js * import 'accessibility'; * import 'app'; * import 'events'; * import 'spritesheet'; * import 'graphics'; * import 'mesh'; * import 'text'; * import 'text-bitmap'; * import 'text-html'; * import { autoDetectRenderer } from 'pixi.js'; * * const renderer = await autoDetectRenderer({ * width: 800, * height: 600, * skipExtensionImports: true, * }); * ``` * @default false */ skipExtensionImports?: boolean; /** * @default true * @deprecated since 8.1.6 * @see `skipExtensionImports` */ manageImports?: boolean; } /** * The configuration for the renderer. * This is used to define the systems and render pipes that will be used by the renderer. * @category rendering * @advanced */ export interface RendererConfig { type: number; name: string; runners?: string[]; systems: { name: string; value: SystemConstructor; }[]; renderPipes: { name: string; value: PipeConstructor; }[]; renderPipeAdaptors: { name: string; value: any; }[]; } /** * The options for rendering a view. * @category rendering * @standard */ export interface RenderOptions extends ClearOptions { /** The container to render. */ container: Container; /** the transform to apply to the container. */ transform?: Matrix; } /** * The options for clearing the render target. * @category rendering * @advanced */ export interface ClearOptions { /** * The render target to render. if this target is a canvas and you are using the WebGL renderer, * please ensure you have set `multiView` to `true` on renderer. */ target?: RenderSurface; /** The color to clear with. */ clearColor?: ColorSource; /** The clear mode to use. */ clear?: CLEAR_OR_BOOL; } /** * Options for destroying the renderer. * This can be a boolean or an object. 
* @category rendering * @standard */ export type RendererDestroyOptions = TypeOrBool; declare const defaultRunners: readonly [ "init", "destroy", "contextChange", "resolutionChange", "resetState", "renderEnd", "renderStart", "render", "update", "postrender", "prerender" ]; type DefaultRunners = typeof defaultRunners[number]; type Runners = { [key in DefaultRunners]: SystemRunner; } & { [K: ({} & string) | ({} & symbol)]: SystemRunner; }; /** * The base class for a PixiJS Renderer. It contains the shared logic for all renderers. * * You should not use this class directly, but instead use {@link WebGLRenderer} * or {@link WebGPURenderer}. * Alternatively, you can also use {@link autoDetectRenderer} if you want us to * determine the best renderer for you. * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a renderer: * * * | Generic Systems | Systems that manage functionality that all renderer types share | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link ViewSystem} | This manages the main view of the renderer usually a Canvas | * | {@link BackgroundSystem} | This manages the main views background color and alpha | * | {@link EventSystem} | This manages UI events. | * | {@link AccessibilitySystem} | This manages accessibility features. Requires `import 'pixi.js/accessibility'`| * * | Core Systems | Provide an optimised, easy to use API to work with WebGL/WebGPU | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link GlobalUniformSystem} | This manages shaders, programs that run on the GPU to calculate 'em pixels. | * | {@link TextureGCSystem} | This will automatically remove textures from the GPU if they are not used. 
| * * | PixiJS High-Level Systems | Set of specific systems designed to work with PixiJS objects | * | ------------------------------------ | ----------------------------------------------------------------------------- | * | {@link HelloSystem} | Says hello, by printing out the pixi version into the console log (along with the renderer type) | * | {@link GenerateTextureSystem} | This adds the ability to generate textures from any Container | * | {@link FilterSystem} | This manages the filtering pipeline for post-processing effects. | * | {@link PrepareSystem} | This manages uploading assets to the GPU. Requires `import 'pixi.js/prepare'`| * | {@link ExtractSystem} | This extracts image data from display objects. | * * The breadth of the API surface provided by the renderer is contained within these systems. * @abstract * @category rendering * @advanced * @property {HelloSystem} hello - HelloSystem instance. * @property {TextureGCSystem} textureGC - TextureGCSystem instance. * @property {FilterSystem} filter - FilterSystem instance. * @property {GlobalUniformSystem} globalUniforms - GlobalUniformSystem instance. * @property {TextureSystem} texture - TextureSystem instance. * @property {EventSystem} events - EventSystem instance. * @property {ExtractSystem} extract - ExtractSystem instance. Requires `import 'pixi.js/extract'`. * @property {PrepareSystem} prepare - PrepareSystem instance. Requires `import 'pixi.js/prepare'`. * @property {AccessibilitySystem} accessibility - AccessibilitySystem instance. Requires `import 'pixi.js/accessibility'`. */ export declare class AbstractRenderer extends EventEmitter<{ resize: [ screenWidth: number, screenHeight: number, resolution: number ]; }> { /** The default options for the renderer. */ static defaultOptions: { /** * Default resolution / device pixel ratio of the renderer. 
* @default 1 */ resolution: number; /** * Should the `failIfMajorPerformanceCaveat` flag be enabled as a context option used in the `isWebGLSupported` * function. If set to true, a WebGL renderer can fail to be created if the browser thinks there could be * performance issues when using WebGL. * * In PixiJS v6 this has changed from true to false by default, to allow WebGL to work in as many * scenarios as possible. However, some users may have a poor experience, for example, if a user has a gpu or * driver version blacklisted by the * browser. * * If your application requires high performance rendering, you may wish to set this to true. * We recommend one of two options if you decide to set this flag to true: * * 1: Use the Canvas renderer as a fallback in case high performance WebGL is * not supported. * * 2: Call `isWebGLSupported` (which is found in the utils package) in your code before attempting to create a * PixiJS renderer, and show an error message to the user if the function returns false, explaining that their * device & browser combination does not support high performance WebGL. * This is a much better strategy than trying to create a PixiJS renderer and finding it then fails. * @default false */ failIfMajorPerformanceCaveat: boolean; /** * Should round pixels be forced when rendering? * @default false */ roundPixels: boolean; }; /** @internal */ readonly type: number; /** The name of the renderer. */ readonly name: string; /** @internal */ readonly uid: number; /** @internal */ _roundPixels: 0 | 1; /** @internal */ readonly runners: Runners; /** @internal */ readonly renderPipes: PIPES; /** The view system manages the main canvas that is attached to the DOM */ view: ViewSystem; /** The background system manages the background color and alpha of the main view. 
*/ background: BackgroundSystem; /** System that manages the generation of textures from the renderer */ textureGenerator: GenerateTextureSystem; protected _initOptions: OPTIONS; protected config: RendererConfig; private _systemsHash; private _lastObjectRendered; /** * Set up a system with a collection of SystemClasses and runners. * Systems are attached dynamically to this class when added. * @param config - the config for the system manager */ constructor(config: RendererConfig); /** * Initialize the renderer. * @param options - The options to use to create the renderer. */ init(options?: Partial): Promise; /** * Renders the object to its view. * @param options - The options to render with. * @param options.container - The container to render. * @param [options.target] - The target to render to. */ render(options: RenderOptions | Container): void; /** @deprecated since 8.0.0 */ render(container: Container, options: { renderTexture: any; }): void; /** * Resizes the WebGL view to the specified width and height. * @param desiredScreenWidth - The desired width of the screen. * @param desiredScreenHeight - The desired height of the screen. * @param resolution - The resolution / device pixel ratio of the renderer. */ resize(desiredScreenWidth: number, desiredScreenHeight: number, resolution?: number): void; /** * Clears the render target. * @param options - The options to use when clearing the render target. * @param options.target - The render target to clear. * @param options.clearColor - The color to clear with. * @param options.clear - The clear mode to use. * @advanced */ clear(options?: ClearOptions): void; /** The resolution / device pixel ratio of the renderer. */ get resolution(): number; set resolution(value: number); /** * Same as view.width, actual number of pixels in the canvas by horizontal. * @type {number} * @readonly * @default 800 */ get width(): number; /** * Same as view.height, actual number of pixels in the canvas by vertical. 
* @default 600 */ get height(): number; /** * The canvas element that everything is drawn to. * @type {environment.ICanvas} */ get canvas(): CANVAS; /** * the last object rendered by the renderer. Useful for other plugins like interaction managers * @readonly */ get lastObjectRendered(): Container; /** * Flag if we are rendering to the screen vs renderTexture * @readonly * @default true */ get renderingToScreen(): boolean; /** * Measurements of the screen. (0, 0, screenWidth, screenHeight). * * It's safe to use as filterArea or hitArea for the whole stage. */ get screen(): Rectangle; /** * Create a bunch of runners based on a collection of ids * @param runnerIds - the runner ids to add */ private _addRunners; private _addSystems; /** * Add a new system to the renderer. * @param ClassRef - Class reference * @param name - Property name for system, if not specified * will use a static `name` property on the class itself. This * name will be assigned as a property on the Renderer so make * sure it doesn't collide with properties on Renderer. * @returns Return instance of renderer */ private _addSystem; private _addPipes; destroy(options?: RendererDestroyOptions): void; /** * Generate a texture from a container. * @param options - options or container target to use when generating the texture * @returns a texture */ generateTexture(options: GenerateTextureOptions | Container): Texture; /** * Whether the renderer will round coordinates to whole pixels when rendering. * Can be overridden on a per scene item basis. */ get roundPixels(): boolean; /** * Overridable function by `pixi.js/unsafe-eval` to silence * throwing an error if platform doesn't support unsafe-evals. * @private * @ignore */ _unsafeEvalCheck(): void; /** * Resets the rendering state of the renderer. * This is useful when you want to use the WebGL context directly and need to ensure PixiJS's internal state * stays synchronized. 
When modifying the WebGL context state externally, calling this method before the next Pixi * render will reset all internal caches and ensure it executes correctly. * * This is particularly useful when combining PixiJS with other rendering engines like Three.js: * ```js * // Reset Three.js state * threeRenderer.resetState(); * * // Render a Three.js scene * threeRenderer.render(threeScene, threeCamera); * * // Reset PixiJS state since Three.js modified the WebGL context * pixiRenderer.resetState(); * * // Now render Pixi content * pixiRenderer.render(pixiScene); * ``` * @advanced */ resetState(): void; } declare const DefaultWebGLSystems: (typeof BackgroundSystem | typeof GlobalUniformSystem | typeof HelloSystem | typeof ViewSystem | typeof RenderGroupSystem | typeof TextureGCSystem | typeof GenerateTextureSystem | typeof ExtractSystem | typeof RendererInitHook | typeof RenderableGCSystem | typeof SchedulerSystem | typeof GlUboSystem | typeof GlBackBufferSystem | typeof GlContextSystem | typeof GlLimitsSystem | typeof GlBufferSystem | typeof GlTextureSystem | typeof GlRenderTargetSystem | typeof GlGeometrySystem | typeof GlUniformGroupSystem | typeof GlShaderSystem | typeof GlEncoderSystem | typeof GlStateSystem | typeof GlStencilSystem | typeof GlColorMaskSystem)[]; declare const DefaultWebGLPipes: (typeof BlendModePipe | typeof BatcherPipe | typeof SpritePipe | typeof RenderGroupPipe | typeof AlphaMaskPipe | typeof StencilMaskPipe | typeof ColorMaskPipe | typeof CustomRenderPipe)[]; /** * The default WebGL renderer, uses WebGL2 contexts. * @category rendering * @standard * @interface */ export type WebGLSystems = ExtractSystemTypes & PixiMixins.RendererSystems & PixiMixins.WebGLSystems; /** * The default WebGL renderer, uses WebGL2 contexts. * @internal */ export type WebGLPipes = ExtractSystemTypes & PixiMixins.RendererPipes & PixiMixins.WebGLPipes; /** * Options for WebGLRenderer. 
* @category rendering * @standard */ export interface WebGLOptions extends SharedRendererOptions, ExtractRendererOptions, PixiMixins.WebGLOptions { } export interface WebGLRenderer extends AbstractRenderer, WebGLSystems { } /** * The WebGL PixiJS Renderer. This renderer allows you to use the most common graphics API, WebGL (and WebGL2). * * ```ts * // Create a new renderer * const renderer = new WebGLRenderer(); * await renderer.init(); * * // Add the renderer to the stage * document.body.appendChild(renderer.canvas); * * // Create a new stage * const stage = new Container(); * * // Render the stage * renderer.render(stage); * ``` * * You can use {@link autoDetectRenderer} to create a renderer that will automatically detect the best * renderer for the environment. * * * ```ts * // Create a new renderer * const renderer = await rendering.autoDetectRenderer({ * preference:'webgl', * }); * ``` * * The renderer is composed of systems that manage specific tasks. The following systems are added by default * whenever you create a WebGL renderer: * * | WebGL Core Systems | Systems that are specific to the WebGL renderer | * | ------------------------------------------- | ----------------------------------------------------------------------------- | * | {@link GlUboSystem} | This manages WebGL2 uniform buffer objects feature for shaders | * | {@link GlBackBufferSystem} | manages the back buffer, used so that pixi can read pixels from the screen | * | {@link GlContextSystem} | This manages the WebGL context and its extensions | * | {@link GlBufferSystem} | This manages buffers and their GPU resources, keeps everything in sync | * | {@link GlTextureSystem} | This manages textures and their GPU resources, keeps everything in sync | * | {@link GlRenderTargetSystem} | This manages what we render to. 
For example the screen, or another texture | * | {@link GlGeometrySystem} | This manages geometry, used for drawing meshes via the GPU | * | {@link GlUniformGroupSystem} | This manages uniform groups. Syncing shader properties with the GPU | * | {@link GlShaderSystem} | This manages shaders, programs that run on the GPU to output lovely pixels | * | {@link GlEncoderSystem} | This manages encoders, a WebGPU Paradigm, use it to draw a mesh + shader | * | {@link GlStateSystem} | This manages the state of the WebGL context. eg the various flags that can be set blend modes / depthTesting etc | * | {@link GlStencilSystem} | This manages the stencil buffer. Used primarily for masking | * | {@link GlColorMaskSystem} | This manages the color mask. Used for color masking | * * The breadth of the API surface provided by the renderer is contained within these systems. * @category rendering * @property {GlUboSystem} ubo - UboSystem instance. * @property {GlBackBufferSystem} backBuffer - BackBufferSystem instance. * @property {GlContextSystem} context - ContextSystem instance. * @property {GlBufferSystem} buffer - BufferSystem instance. * @property {GlTextureSystem} texture - TextureSystem instance. * @property {GlRenderTargetSystem} renderTarget - RenderTargetSystem instance. * @property {GlGeometrySystem} geometry - GeometrySystem instance. * @property {GlUniformGroupSystem} uniformGroup - UniformGroupSystem instance. * @property {GlShaderSystem} shader - ShaderSystem instance. * @property {GlEncoderSystem} encoder - EncoderSystem instance. * @property {GlStateSystem} state - StateSystem instance. * @property {GlStencilSystem} stencil - StencilSystem instance. * @property {GlColorMaskSystem} colorMask - ColorMaskSystem instance. * @extends AbstractRenderer * @standard */ export declare class WebGLRenderer extends AbstractRenderer implements WebGLSystems { gl: GlRenderingContext; constructor(); } /** * A generic renderer that can be either a WebGL or WebGPU renderer. 
* @category rendering * @extends WebGLRenderer * @extends WebGPURenderer * @standard */ export type Renderer = WebGLRenderer | WebGPURenderer; /** * Generic pipes for the renderer. * @category rendering * @advanced */ export type RenderPipes = WebGLPipes | WebGPUPipes; /** * Options for the renderer. * @extends WebGLOptions * @extends WebGPUOptions * @category rendering * @standard */ export interface RendererOptions extends WebGLOptions, WebGPUOptions { } /** * Ids for the different render types. * The idea is that you can use bitwise operations to filter whether or not you want to do something * in a certain render type. * Filters for example can be compatible for both webGL or WebGPU but not compatible with canvas. * So internally if it works with both we set filter.compatibleRenderers = RendererType.WEBGL | RendererType.WEBGPU * if it only works with webgl we set filter.compatibleRenderers = RendererType.WEBGL * @category rendering * @internal */ export declare enum RendererType { /** The WebGL renderer */ WEBGL = 1, /** The WebGPU renderer */ WEBGPU = 2, /** Either WebGL or WebGPU renderer */ BOTH = 3 } /** * The GPU power preference for the WebGPU context. * This is an optional hint indicating what configuration of GPU is suitable for the WebGPU context, * * - `'high-performance'` will prioritize rendering performance over power consumption, * - `'low-power'` will prioritize power saving over rendering performance. * @category rendering * @advanced */ export type GpuPowerPreference = "low-power" | "high-performance"; /** @internal */ export interface GPUData { destroy: () => void; } /** * Options for the construction of a ViewContainer. * @category scene * @advanced */ export interface ViewContainerOptions extends ContainerOptions, PixiMixins.ViewContainerOptions { } export interface ViewContainer extends PixiMixins.ViewContainer, Container { _gpuData: Record; } /** * A ViewContainer is a type of container that represents a view. 
* This view can be a Sprite, a Graphics object, or any other object that can be rendered. * This class is abstract and should not be used directly. * @category scene * @advanced */ export declare abstract class ViewContainer extends Container implements View { /** @internal */ readonly renderPipeId: string; /** @internal */ readonly canBundle = true; /** @internal */ allowChildren: boolean; /** @internal */ _roundPixels: 0 | 1; /** @internal */ _lastUsed: number; /** @internal */ _gpuData: Record; protected _bounds: Bounds; protected _boundsDirty: boolean; /** * The local bounds of the view in its own coordinate space. * Bounds are automatically updated when the view's content changes. * @example * ```ts * // Get bounds dimensions * const bounds = view.bounds; * console.log(`Width: ${bounds.maxX - bounds.minX}`); * console.log(`Height: ${bounds.maxY - bounds.minY}`); * ``` * @returns The rectangular bounds of the view * @see {@link Bounds} For bounds operations */ get bounds(): Bounds; /** @private */ protected abstract updateBounds(): void; /** * Whether or not to round the x/y position of the sprite. * @example * ```ts * // Enable pixel rounding for crisp rendering * view.roundPixels = true; * ``` * @default false */ get roundPixels(): boolean; set roundPixels(value: boolean); constructor(options: ViewContainerOptions); /** * Checks if the object contains the given point in local coordinates. * Uses the view's bounds for hit testing. 
* @example * ```ts * // Basic point check * const localPoint = { x: 50, y: 25 }; * const contains = view.containsPoint(localPoint); * console.log('Point is inside:', contains); * ``` * @param point - The point to check in local coordinates * @returns True if the point is within the view's bounds * @see {@link ViewContainer#bounds} For the bounds used in hit testing * @see {@link Container#toLocal} For converting global coordinates to local */ containsPoint(point: PointData): boolean; /** @private */ abstract batched: boolean; /** @private */ protected onViewUpdate(): void; destroy(options?: DestroyOptions): void; /** * Collects renderables for the view container. * @param instructionSet - The instruction set to collect renderables for. * @param renderer - The renderer to collect renderables for. * @param currentLayer - The current render layer. * @internal */ collectRenderablesSimple(instructionSet: InstructionSet, renderer: Renderer, currentLayer: IRenderLayer): void; } /** * Represents a renderable object in the rendering system. * This is typically a view container that can be rendered to a target. * @internal */ export type Renderable = ViewContainer; /** * A set of instructions that can be executed by the renderer. * Basically wraps an array, but with some extra properties that help the renderer * to keep things nice and optimised. * * Note: * InstructionSet.instructions contains all the instructions, but does not resize (for performance). 
* So for the true length of the instructions you need to use InstructionSet.instructionSize * @category rendering * @advanced */ export declare class InstructionSet { /** a unique id for this instruction set used through the renderer */ readonly uid: number; /** the array of instructions */ readonly instructions: Instruction[]; /** the actual size of the array (any instructions past this should be ignored) */ instructionSize: number; /** allows for access to the render pipes of the renderer */ renderPipes: any; renderables: Renderable[]; /** used by the garbage collector to track when the instruction set was last used */ gcTick: number; /** reset the instruction set so it can be reused; sets size back to 0 */ reset(): void; /** * Add an instruction to the set * @param instruction - add an instruction to the set */ add(instruction: Instruction): void; /** * Log the instructions to the console (for debugging) * @internal */ log(): void; } /** * Options for configuring a RenderLayer. A RenderLayer allows control over rendering order * independent of the scene graph hierarchy. * @example * ```ts * // Basic layer with automatic sorting * const layer = new RenderLayer({ * sortableChildren: true * }); * * // Layer with custom sort function * const customLayer = new RenderLayer({ * sortableChildren: true, * sortFunction: (a, b) => { * // Sort by y position * return a.position.y - b.position.y; * } * }); * * // Add objects to layer while maintaining scene graph parent * const sprite = new Sprite(texture); * container.addChild(sprite); // Add to scene graph * layer.attach(sprite); // Add to render layer * * // Manual sorting when needed * const manualLayer = new RenderLayer({ * sortableChildren: false * }); * manualLayer.attach(sprite1, sprite2); * manualLayer.sortRenderLayerChildren(); // Sort manually * ``` * @category scene * @standard */ export interface RenderLayerOptions { /** * If true, the layer's children will be sorted by zIndex before rendering. 
* If false, you can manually sort the children using sortRenderLayerChildren when needed. * @default false * @example * ```ts * const layer = new RenderLayer({ * sortableChildren: true // Automatically sorts children by zIndex * }); * ``` * @see {@link RenderLayer#sortRenderLayerChildren} For manual sorting * @see {@link RenderLayer#sortFunction} For customizing the sort logic * @see {@link Container#zIndex} For the default sort property */ sortableChildren?: boolean; /** * Custom sort function to sort layer children. Default sorts by zIndex. * @param a - First container to compare * @param b - Second container to compare * @returns Negative if a should render before b, positive if b should render before a * @example * ```ts * const layer = new RenderLayer({ * sortFunction: (a, b) => { * // Sort by y position * return a.position.y - b.position.y; * } * }); * ``` * @see {@link RenderLayer#sortableChildren} For enabling automatic sorting * @see {@link RenderLayer#sortRenderLayerChildren} For manual sorting * @see {@link Container#zIndex} For the default sort property * @default (a, b) => a.zIndex - b.zIndex */ sortFunction?: (a: Container, b: Container) => number; } type ContainerKeys = keyof Container; type PartialContainerKeys = Exclude; /** @internal */ export type IRenderLayer = Omit; declare class RenderLayerClass extends Container { /** * Default options for RenderLayer instances. These options control the sorting behavior * of objects within the render layer. 
* @example * ```ts * // Create a custom render layer with modified default options * RenderLayer.defaultOptions = { * sortableChildren: true, * sortFunction: (a, b) => a.y - b.y // Sort by vertical position * }; * * // All new render layers will use these defaults * const layer1 = new RenderLayer(); * // layer1 will have sortableChildren = true * ``` * @property {boolean} sortableChildren - * @property {Function} sortFunction - * @see {@link RenderLayer} For the main render layer class * @see {@link Container#zIndex} For the default sort property * @see {@link RenderLayer#sortRenderLayerChildren} For manual sorting */ static defaultOptions: RenderLayerOptions; /** Function used to sort layer children if sortableChildren is true */ sortFunction: (a: Container, b: Container) => number; /** * The list of objects that this layer is responsible for rendering. Objects in this list maintain * their original parent in the scene graph but are rendered as part of this layer. * @example * ```ts * const layer = new RenderLayer(); * const sprite = new Sprite(texture); * * // Add sprite to scene graph for transforms * container.addChild(sprite); * * // Add to layer for render order control * layer.attach(sprite); * console.log(layer.renderLayerChildren.length); // 1 * * // Access objects in the layer * layer.renderLayerChildren.forEach(child => { * console.log('Layer child:', child); * }); * * // Check if object is in layer * const isInLayer = layer.renderLayerChildren.includes(sprite); * * // Clear all objects from layer * layer.detachAll(); * console.log(layer.renderLayerChildren.length); // 0 * ``` * @readonly * @see {@link RenderLayer#attach} For adding objects to the layer * @see {@link RenderLayer#detach} For removing objects from the layer * @see {@link RenderLayer#detachAll} For removing all objects from the layer */ renderLayerChildren: Container[]; /** * Creates a new RenderLayer instance * @param options - Configuration options for the RenderLayer * @param {boolean} 
[options.sortableChildren=false] - If true, layer children will be automatically sorted each render * @param {Function} [options.sortFunction] - Custom function to sort layer children. Default sorts by zIndex */ constructor(options?: RenderLayerOptions); /** * Adds one or more Containers to this render layer. The Containers will be rendered as part of this layer * while maintaining their original parent in the scene graph. * * If the Container already belongs to a layer, it will be removed from the old layer before being added to this one. * @example * ```ts * const layer = new RenderLayer(); * const container = new Container(); * const sprite1 = new Sprite(texture1); * const sprite2 = new Sprite(texture2); * * // Add sprites to scene graph for transforms * container.addChild(sprite1, sprite2); * * // Add sprites to layer for render order control * layer.attach(sprite1, sprite2); * * // Add single sprite with type checking * const typedSprite = layer.attach(new Sprite(texture3)); * typedSprite.tint = 'red'; * * // Automatically removes from previous layer if needed * const otherLayer = new RenderLayer(); * otherLayer.attach(sprite1); // Removes from previous layer * ``` * @param children - The Container(s) to add to this layer. Can be any Container or array of Containers. * @returns The first child that was added, for method chaining * @see {@link RenderLayer#detach} For removing objects from the layer * @see {@link RenderLayer#detachAll} For removing all objects from the layer * @see {@link Container#addChild} For adding to scene graph hierarchy */ attach(...children: U): U[0]; /** * Removes one or more Containers from this render layer. The Containers will maintain their * original parent in the scene graph but will no longer be rendered as part of this layer. 
* @example * ```ts * const layer = new RenderLayer(); * const container = new Container(); * const sprite1 = new Sprite(texture1); * const sprite2 = new Sprite(texture2); * * // Add sprites to scene graph and layer * container.addChild(sprite1, sprite2); * layer.attach(sprite1, sprite2); * * // Remove single sprite from layer * layer.detach(sprite1); * // sprite1 is still child of container but not rendered in layer * * // Remove multiple sprites at once * const otherLayer = new RenderLayer(); * otherLayer.attach(sprite3, sprite4); * otherLayer.detach(sprite3, sprite4); * * // Type-safe detachment * const typedSprite = layer.detach(spriteInLayer); * typedSprite.texture = newTexture; // TypeScript knows this is a Sprite * ``` * @param children - The Container(s) to remove from this layer * @returns The first child that was removed, for method chaining * @see {@link RenderLayer#attach} For adding objects to the layer * @see {@link RenderLayer#detachAll} For removing all objects from the layer * @see {@link Container#removeChild} For removing from scene graph hierarchy */ detach(...children: U): U[0]; /** * Removes all objects from this render layer. Objects will maintain their * original parent in the scene graph but will no longer be rendered as part of this layer. 
* @example * ```ts * const layer = new RenderLayer(); * const container = new Container(); * * // Add multiple sprites to scene graph and layer * const sprites = [ * new Sprite(texture1), * new Sprite(texture2), * new Sprite(texture3) * ]; * * container.addChild(...sprites); // Add to scene graph * layer.attach(...sprites); // Add to render layer * * // Later, remove all sprites from layer at once * layer.detachAll(); * console.log(layer.renderLayerChildren.length); // 0 * console.log(container.children.length); // 3 (still in scene graph) * ``` * @returns The RenderLayer instance for method chaining * @see {@link RenderLayer#attach} For adding objects to the layer * @see {@link RenderLayer#detach} For removing individual objects * @see {@link Container#removeChildren} For removing from scene graph */ detachAll(): void; /** * Collects renderables for this layer and its children. * This method is called by the renderer to gather all objects that should be rendered in this layer. * @param instructionSet - The set of instructions to collect renderables into. * @param renderer - The renderer that is collecting renderables. * @param _currentLayer - The current render layer being processed. * @internal */ collectRenderables(instructionSet: InstructionSet, renderer: Renderer, _currentLayer: RenderLayerClass): void; /** * Sort the layer's children using the defined sort function. This method allows manual sorting * of layer children and is automatically called during rendering if sortableChildren is true. 
* @example * ```ts * const layer = new RenderLayer(); * * // Add multiple sprites at different depths * const sprite1 = new Sprite(texture); * const sprite2 = new Sprite(texture); * const sprite3 = new Sprite(texture); * * sprite1.zIndex = 3; * sprite2.zIndex = 1; * sprite3.zIndex = 2; * * layer.attach(sprite1, sprite2, sprite3); * * // Manual sorting with default zIndex sort * layer.sortRenderLayerChildren(); * // Order is now: sprite2 (1), sprite3 (2), sprite1 (3) * * // Custom sort by y position * layer.sortFunction = (a, b) => a.y - b.y; * layer.sortRenderLayerChildren(); * * // Automatic sorting * layer.sortableChildren = true; // Will sort each render * ``` * @returns The RenderLayer instance for method chaining * @see {@link RenderLayer#sortableChildren} For enabling automatic sorting * @see {@link RenderLayer#sortFunction} For customizing the sort logic */ sortRenderLayerChildren(): void; /** * Recursively calculates the global bounds of this RenderLayer and its children. * @param factorRenderLayers * @param bounds * @param _currentLayer * @internal */ _getGlobalBoundsRecursive(factorRenderLayers: boolean, bounds: Bounds, _currentLayer: RenderLayerClass): void; } /** * The RenderLayer API provides a way to control the rendering order of objects independently * of their logical parent-child relationships in the scene graph. * This allows developers to decouple how objects are transformed * (via their logical parent) from how they are rendered on the screen. * * ### Key Concepts * * #### RenderLayers Control Rendering Order: * - RenderLayers define where in the render stack objects are drawn, * but they do not affect an object's transformations (e.g., position, scale, rotation) or logical hierarchy. * - RenderLayers can be added anywhere in the scene graph. * * #### Logical Parenting Remains Unchanged: * - Objects still have a logical parent for transformations via addChild. * - Assigning an object to a layer does not reparent it. 
* * #### Explicit Control: * - Developers assign objects to layers using renderLayer.attach and remove them using renderLayer.detach. * --- * ### API Details * * #### 1. Creating a RenderLayer * A RenderLayer is a lightweight object responsible for controlling render order. * It has no children or transformations of its own * but can be inserted anywhere in the scene graph to define its render position. * ```js * const layer = new RenderLayer(); * app.stage.addChild(layer); // Insert the layer into the scene graph * ``` * * #### 2. Adding Objects to a Layer * Use renderLayer.attach to assign an object to a layer. * This overrides the object's default render order defined by its logical parent. * ```js * const rect = new Graphics(); * container.addChild(rect); // Add to logical parent * layer.attach(rect); // Control render order via the layer * ``` * * #### 3. Removing Objects from a Layer * To stop an object from being rendered in the layer, use detach. * ```js * layer.detach(rect); // Stop rendering rect via the layer * ``` * When an object is removed from its logical parent (removeChild), it is automatically removed from the layer. * * #### 4. Re-Adding Objects to Layers * If an object is re-added to a logical parent, it does not automatically reassign itself to the layer. * Developers must explicitly reassign it. * ```js * container.addChild(rect); // Logical parent * layer.attach(rect); // Explicitly reassign to the layer * ``` * * #### 5. Layer Position in Scene Graph * A layer's position in the scene graph determines its render priority relative to other layers and objects. * Layers can be inserted anywhere in the scene graph. * ```js * const backgroundLayer = new RenderLayer(); * const uiLayer = new RenderLayer(); * * app.stage.addChild(backgroundLayer); * app.stage.addChild(world); * app.stage.addChild(uiLayer); * ``` * This is a new API and therefore considered experimental at this stage. 
* While the core is pretty robust, there are still a few tricky issues we need to tackle. * However, even with the known issues below, we believe this API is incredibly useful! * * Known issues: * - Interaction may not work as expected since hit testing does not account for the visual render order created by layers. * For example, if an object is visually moved to the front via a layer, hit testing will still use its original position. * - RenderLayers and their children must all belong to the same renderGroup to work correctly * @category scene * @class * @extends null * @standard */ export declare const RenderLayer: new (options?: RenderLayerOptions) => IRenderLayer; /** * The type of child that can be added to a {@link Container}. * This is a generic type that extends the {@link Container} class. * @category scene * @standard */ export type ContainerChild = Container; /** * Events that can be emitted by a Container. These events provide lifecycle hooks and notifications * for container state changes. 
* @example * ```ts * import { Container, Sprite } from 'pixi.js'; * * // Setup container with event listeners * const container = new Container(); * * // Listen for child additions * container.on('childAdded', (child, container, index) => { * console.log(`Child added at index ${index}:`, child); * }); * * // Listen for child removals * container.on('childRemoved', (child, container, index) => { * console.log(`Child removed from index ${index}:`, child); * }); * * // Listen for when container is added to parent * container.on('added', (parent) => { * console.log('Added to parent:', parent); * }); * * // Listen for when container is removed from parent * container.on('removed', (parent) => { * console.log('Removed from parent:', parent); * }); * * // Listen for container destruction * container.on('destroyed', (container) => { * console.log('Container destroyed:', container); * }); * ``` * @category scene * @standard */ export interface ContainerEvents extends PixiMixins.ContainerEvents { /** * Emitted when this container is added to a new container. * Useful for setting up parent-specific behaviors. * @param container - The parent container this was added to * @example * ```ts * const child = new Container(); * child.on('added', (parent) => { * console.log('Child added to parent:', parent.label); * }); * parentContainer.addChild(child); * ``` */ added: [ container: Container ]; /** * Emitted when a child is added to this container. * Useful for tracking container composition changes. * @param child - The child that was added * @param container - The container the child was added to (this container) * @param index - The index at which the child was added * @example * ```ts * const parent = new Container(); * parent.on('childAdded', (child, container, index) => { * console.log(`New child at index ${index}:`, child); * }); * ``` */ childAdded: [ child: C, container: Container, index: number ]; /** * Emitted when this container is removed from its parent. 
* Useful for cleanup and state management. * @param container - The parent container this was removed from * @example * ```ts * const child = new Container(); * child.on('removed', (oldParent) => { * console.log('Child removed from parent:', oldParent.label); * }); * ``` */ removed: [ container: Container ]; /** * Emitted when a child is removed from this container. * Useful for cleanup and maintaining container state. * @param child - The child that was removed * @param container - The container the child was removed from (this container) * @param index - The index from which the child was removed * @example * ```ts * const parent = new Container(); * parent.on('childRemoved', (child, container, index) => { * console.log(`Child removed from index ${index}:`, child); * }); * ``` */ childRemoved: [ child: C, container: Container, index: number ]; /** * Emitted when the container is destroyed. * Useful for final cleanup and resource management. * @param container - The container that was destroyed * @example * ```ts * const container = new Container(); * container.on('destroyed', (container) => { * console.log('Container destroyed:', container.label); * }); * ``` */ destroyed: [ container: Container ]; } type AnyEvent = { [K: ({} & string) | ({} & symbol)]: any; }; /** @internal */ export declare const UPDATE_COLOR = 1; /** @internal */ export declare const UPDATE_BLEND = 2; /** @internal */ export declare const UPDATE_VISIBLE = 4; /** @internal */ export declare const UPDATE_TRANSFORM = 8; /** * Options for updating the transform of a container. * @category scene * @standard */ export interface UpdateTransformOptions { x: number; y: number; scaleX: number; scaleY: number; rotation: number; skewX: number; skewY: number; pivotX: number; pivotY: number; originX: number; originY: number; } /** * Constructor options used for `Container` instances. 
* ```js * const container = new Container({ * position: new Point(100, 200), * scale: new Point(2, 2), * rotation: Math.PI / 2, * }); * ``` * @category scene * @standard * @see Container */ export interface ContainerOptions extends PixiMixins.ContainerOptions { /** @see Container#isRenderGroup */ isRenderGroup?: boolean; /** * The blend mode to be applied to the sprite. Controls how pixels are blended when rendering. * * Setting to 'normal' will reset to default blending. * > [!NOTE] More blend modes are available after importing the `pixi.js/advanced-blend-modes` sub-export. * @example * ```ts * // Basic blend modes * new Container({ blendMode: 'normal' }); // Default blending * new Container({ blendMode: 'add' }); // Additive blending * new Container({ blendMode: 'multiply' }); // Multiply colors * new Container({ blendMode: 'screen' }); // Screen blend * ``` * @default 'normal' * @see {@link Container#alpha} For transparency * @see {@link Container#tint} For color adjustments */ blendMode?: BLEND_MODES; /** * The tint applied to the sprite. * * This can be any valid {@link ColorSource}. * @example * ```ts * new Container({ tint: 0xff0000 }); // Red tint * new Container({ tint: 'blue' }); // Blue tint * new Container({ tint: '#00ff00' }); // Green tint * new Container({ tint: 'rgb(0,0,255)' }); // Blue tint * ``` * @default 0xFFFFFF * @see {@link Container#alpha} For transparency * @see {@link Container#visible} For visibility control */ tint?: ColorSource; /** * The opacity of the object relative to its parent's opacity. * Value ranges from 0 (fully transparent) to 1 (fully opaque). * @example * ```ts * new Container({ alpha: 0.5 }); // 50% opacity * new Container({ alpha: 1 }); // Fully opaque * ``` * @default 1 * @see {@link Container#visible} For toggling visibility * @see {@link Container#renderable} For render control */ alpha?: number; /** * The angle of the object in degrees. 
* * > [!NOTE] 'rotation' and 'angle' have the same effect on a display object; * > rotation is in radians, angle is in degrees. * @example * ```ts * new Container({ angle: 45 }); // Rotate 45 degrees * new Container({ angle: 90 }); // Rotate 90 degrees * ``` */ angle?: number; /** * The array of children of this container. Each child must be a Container or extend from it. * * The array is read-only, but its contents can be modified using Container methods. * @example * ```ts * new Container({ * children: [ * new Container(), // First child * new Container(), // Second child * ], * }); * ``` * @readonly * @see {@link Container#addChild} For adding children * @see {@link Container#removeChild} For removing children */ children?: C[]; /** * The display object container that contains this display object. * This represents the parent-child relationship in the display tree. * @readonly * @see {@link Container#addChild} For adding to a parent * @see {@link Container#removeChild} For removing from parent */ parent?: Container; /** * Controls whether this object can be rendered. If false the object will not be drawn, * but the transform will still be updated. This is different from visible, which skips * transform updates. * @example * ```ts * new Container({ renderable: false }); // Will not be drawn, but transforms will update * ``` * @default true * @see {@link Container#visible} For skipping transform updates * @see {@link Container#alpha} For transparency */ renderable?: boolean; /** * The rotation of the object in radians. * * > [!NOTE] 'rotation' and 'angle' have the same effect on a display object; * > rotation is in radians, angle is in degrees. * @example * ```ts * new Container({ rotation: Math.PI / 4 }); // Rotate 45 degrees * new Container({ rotation: Math.PI / 2 }); // Rotate 90 degrees * ``` */ rotation?: number; /** * The scale factors of this object along the local coordinate axes. * * The default scale is (1, 1). 
* @example * ```ts * new Container({ scale: new Point(2, 2) }); // Scale by 2x * new Container({ scale: 0.5 }); // Scale by 0.5x * new Container({ scale: { x: 1.5, y: 1.5 } }); // Scale by 1.5x * ``` */ scale?: PointData | number; /** * The center of rotation, scaling, and skewing for this display object in its local space. * The `position` is the projection of `pivot` in the parent's local space. * * By default, the pivot is the origin (0, 0). * @example * ```ts * new Container({ pivot: new Point(100, 200) }); // Set pivot to (100, 200) * new Container({ pivot: 50 }); // Set pivot to (50, 50) * new Container({ pivot: { x: 150, y: 150 } }); // Set pivot to (150, 150) * ``` */ pivot?: PointData | number; /** * The origin point around which the container rotates and scales. * Unlike pivot, changing origin will not move the container's position. * @example * ```ts * new Container({ origin: new Point(100, 100) }); // Rotate around point (100,100) * new Container({ origin: 50 }); // Rotate around point (50, 50) * new Container({ origin: { x: 150, y: 150 } }); // Rotate around point (150, 150) * ``` */ origin?: PointData | number; /** * The coordinate of the object relative to the local coordinates of the parent. * @example * ```ts * new Container({ position: new Point(100, 200) }); // Set position to (100, 200) * new Container({ position: { x: 150, y: 150 } }); // Set position to (150, 150) * ``` */ position?: PointData; /** * The skew factor for the object in radians. Skewing is a transformation that distorts * the object by rotating it differently at each point, creating a non-uniform shape. * @example * ```ts * new Container({ skew: new Point(0.1, 0.2) }); // Skew by 0.1 radians on x and 0.2 radians on y * new Container({ skew: { x: 0.1, y: 0.2 } }); // Skew by 0.1 radians on x and 0.2 radians on y * ``` * @default { x: 0, y: 0 } */ skew?: PointData; /** * The visibility of the object. If false the object will not be drawn, * and the transform will not be updated. 
* @example * ```ts * new Container({ visible: false }); // Will not be drawn and transforms will not update * new Container({ visible: true }); // Will be drawn and transforms will update * ``` * @default true * @see {@link Container#renderable} For render-only control * @see {@link Container#alpha} For transparency */ visible?: boolean; /** * The position of the container on the x axis relative to the local coordinates of the parent. * * An alias to position.x * @example * ```ts * new Container({ x: 100 }); // Set x position to 100 * ``` */ x?: number; /** * The position of the container on the y axis relative to the local coordinates of the parent. * * An alias to position.y * @example * ```ts * new Container({ y: 200 }); // Set y position to 200 * ``` */ y?: number; /** * An optional bounds area for this container. Setting this rectangle will stop the renderer * from recursively measuring the bounds of each children and instead use this single boundArea. * * > [!IMPORTANT] This is great for optimisation! If for example you have a * > 1000 spinning particles and you know they all sit within a specific bounds, * > then setting it will mean the renderer will not need to measure the * > 1000 children to find the bounds. Instead it will just use the bounds you set. * @example * ```ts * const container = new Container({ * boundsArea: new Rectangle(0, 0, 500, 500) // Set a fixed bounds area * }); * ``` */ boundsArea?: Rectangle; } export interface Container extends PixiMixins.Container, EventEmitter & AnyEvent> { } /** * Container is a general-purpose display object that holds children. It also adds built-in support for advanced * rendering features like masking and filtering. * * It is the base class of all display objects that act as a container for other objects, including Graphics * and Sprite. * *

* * Transforms * * The [transform]{@link Container#localTransform} of a display object describes the projection from its * local coordinate space to its parent's local coordinate space. The following properties are derived * from the transform: * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
PropertyDescription
[pivot]{@link Container#pivot} * Invariant under rotation, scaling, and skewing. The projection into the parent's space of the pivot * is equal to position, regardless of the other three transformations. In other words, it is the center of * rotation, scaling, and skewing. *
[position]{@link Container#position} * Translation. This is the position of the [pivot]{@link Container#pivot} in the parent's local * space. The default value of the pivot is the origin (0,0). If the top-left corner of your display object * is (0,0) in its local space, then the position will be its top-left corner in the parent's local space. *
[scale]{@link Container#scale} * Scaling. This will stretch (or compress) the display object's projection. The scale factors are along the * local coordinate axes. In other words, the display object is scaled before rotated or skewed. The center * of scaling is the [pivot]{@link Container#pivot}. *
[rotation]{@link Container#rotation} * Rotation. This will rotate the display object's projection by this angle (in radians). *
[skew]{@link Container#skew} *

Skewing. This can be used to deform a rectangular display object into a parallelogram.

*

* In PixiJS, skew has a slightly different behaviour than the conventional meaning. It can be * thought of the net rotation applied to the coordinate axes (separately). For example, if "skew.x" is * ⍺ and "skew.y" is β, then the line x = 0 will be rotated by ⍺ (y = -x*cot⍺) and the line y = 0 will be * rotated by β (y = x*tanβ). A line y = x*tanϴ (i.e. a line at angle ϴ to the x-axis in local-space) will * be rotated by an angle between ⍺ and β. *

*

* It can be observed that if skew is applied equally to both axes, then it will be equivalent to applying * a rotation. Indeed, if "skew.x" = -ϴ and "skew.y" = ϴ, it will produce an equivalent of "rotation" = ϴ. *

*

* Another quite interesting observation is that "skew.x", "skew.y", and rotation are commutative operations. This is * because rotation is essentially a careful combination of the two. *

*
[angle]{@link Container#angle}Rotation. This is an alias for [rotation]{@link Container#rotation}, but in degrees.
[x]{@link Container#x}Translation. This is an alias for position.x!
[y]{@link Container#y}Translation. This is an alias for position.y!
[width]{@link Container#width} * Implemented in [Container]{@link Container}. Scaling. The width property calculates scale.x by dividing * the "requested" width by the local bounding box width. It is indirectly an abstraction over scale.x, and there * is no concept of user-defined width. *
[height]{@link Container#height} * Implemented in [Container]{@link Container}. Scaling. The height property calculates scale.y by dividing * the "requested" height by the local bounding box height. It is indirectly an abstraction over scale.y, and there * is no concept of user-defined height. *
*
* *
* Alpha * * This alpha sets a display object's **relative opacity** w.r.t its parent. For example, if the alpha of a display * object is 0.5 and its parent's alpha is 0.5, then it will be rendered with 25% opacity (assuming alpha is not * applied on any ancestor further up the chain). *
* *
* Renderable vs Visible * * The `renderable` and `visible` properties can be used to prevent a display object from being rendered to the * screen. However, there is a subtle difference between the two. When using `renderable`, the transforms of the display * object (and its children subtree) will continue to be calculated. When using `visible`, the transforms will not * be calculated. * ```ts * import { BlurFilter, Container, Graphics, Sprite } from 'pixi.js'; * * const container = new Container(); * const sprite = Sprite.from('https://s3-us-west-2.amazonaws.com/s.cdpn.io/693612/IaUrttj.png'); * * sprite.width = 512; * sprite.height = 512; * * // Adds a sprite as a child to this container. As a result, the sprite will be rendered whenever the container * // is rendered. * container.addChild(sprite); * * // Blurs whatever is rendered by the container * container.filters = [new BlurFilter()]; * * // Only the contents within a circle at the center should be rendered onto the screen. * container.mask = new Graphics() * .beginFill(0xffffff) * .drawCircle(sprite.width / 2, sprite.height / 2, Math.min(sprite.width, sprite.height) / 2) * .endFill(); * ``` * *
* *
* RenderGroup * * In PixiJS v8, containers can be set to operate in 'render group mode', * transforming them into entities akin to a stage in traditional rendering paradigms. * A render group is a root renderable entity, similar to a container, * but it's rendered in a separate pass with its own unique set of rendering instructions. * This approach enhances rendering efficiency and organization, particularly in complex scenes. * * You can enable render group mode on any container using container.enableRenderGroup() * or by initializing a new container with the render group property set to true (new Container({isRenderGroup: true})). * The method you choose depends on your specific use case and setup requirements. * * An important aspect of PixiJS’s rendering process is the automatic treatment of rendered scenes as render groups. * This conversion streamlines the rendering process, but understanding when and how this happens is crucial * to fully leverage its benefits. * * One of the key advantages of using render groups is the performance efficiency in moving them. Since transformations * are applied at the GPU level, moving a render group, even one with complex and numerous children, * doesn't require recalculating the rendering instructions or performing transformations on each child. * This makes operations like panning a large game world incredibly efficient. * * However, it's crucial to note that render groups do not batch together. * This means that turning every container into a render group could actually slow things down, * as each render group is processed separately. It's best to use render groups judiciously, at a broader level, * rather than on a per-child basis. * This approach ensures you get the performance benefits without overburdening the rendering process. 
* * RenderGroups maintain their own set of rendering instructions, * ensuring that changes or updates within a render group don't affect the rendering * instructions of its parent or other render groups. * This isolation ensures more stable and predictable rendering behavior. * * Additionally, renderGroups can be nested, allowing for powerful options in organizing different aspects of your scene. * This feature is particularly beneficial for separating complex game graphics from UI elements, * enabling intricate and efficient scene management in complex applications. * * This means that Containers have 3 levels of matrix to be mindful of: * * 1. localTransform, this is the transform of the container based on its own properties * 2. groupTransform, this is the transform of the container relative to the renderGroup it belongs to * 3. worldTransform, this is the transform of the container relative to the Scene being rendered *
* @category scene * @standard */ export declare class Container extends EventEmitter & AnyEvent> { /** * Mixes all enumerable properties and methods from a source object to Container. * @param source - The source of properties and methods to mix in. * @deprecated since 8.8.0 */ static mixin(source: Dict): void; /** * unique id for this container * @internal */ readonly uid: number; /** @private */ _updateFlags: number; /** @private */ renderGroup: RenderGroup; /** @private */ parentRenderGroup: RenderGroup; /** @private */ parentRenderGroupIndex: number; /** @private */ didChange: boolean; /** @private */ didViewUpdate: boolean; /** @private */ relativeRenderGroupDepth: number; /** * The array of children of this container. Each child must be a Container or extend from it. * * The array is read-only, but its contents can be modified using Container methods. * @example * ```ts * // Access children * const firstChild = container.children[0]; * const lastChild = container.children[container.children.length - 1]; * ``` * @readonly * @see {@link Container#addChild} For adding children * @see {@link Container#removeChild} For removing children */ children: C[]; /** * The display object container that contains this display object. * This represents the parent-child relationship in the display tree. * @example * ```ts * // Basic parent access * const parent = sprite.parent; * * // Walk up the tree * let current = sprite; * while (current.parent) { * console.log('Level up:', current.parent.constructor.name); * current = current.parent; * } * ``` * @readonly * @see {@link Container#addChild} For adding to a parent * @see {@link Container#removeChild} For removing from parent */ parent: Container | null; /** @private */ includeInBuild: boolean; /** @private */ measurable: boolean; /** @private */ isSimple: boolean; /** * The RenderLayer this container belongs to, if any. * If it belongs to a RenderLayer, it will be rendered from the RenderLayer's position in the scene. 
* @readonly * @advanced */ parentRenderLayer: IRenderLayer; /** @internal */ updateTick: number; /** * Current transform of the object based on local factors: position, scale, other stuff. * This matrix represents the local transformation without any parent influence. * @example * ```ts * // Basic transform access * const localMatrix = sprite.localTransform; * console.log(localMatrix.toString()); * ``` * @readonly * @see {@link Container#worldTransform} For global transform * @see {@link Container#groupTransform} For render group transform */ localTransform: Matrix; /** * The relative group transform is a transform relative to the render group it belongs to. It will include all parent * transforms and up to the render group (think of it as kind of like a stage - but the stage can be nested). * If this container is itself a render group, the matrix will be relative to its parent render group * @readonly * @advanced */ relativeGroupTransform: Matrix; /** * The group transform is a transform relative to the render group it belongs to. * If this container is a render group then this will be an identity matrix; otherwise it * will be the same as the relativeGroupTransform. * Use this value when actually rendering things to the screen * @readonly * @advanced */ groupTransform: Matrix; private _worldTransform; /** * Whether this object has been destroyed. If true, the object should no longer be used. * After an object is destroyed, all of its functionality is disabled and references are removed. * @example * ```ts * // Cleanup with destroy * sprite.destroy(); * console.log(sprite.destroyed); // true * ``` * @default false * @see {@link Container#destroy} For destroying objects */ destroyed: boolean; /** * The coordinate of the object relative to the local coordinates of the parent. * @internal */ _position: ObservablePoint; /** * The scale factor of the object. * @internal */ _scale: ObservablePoint; /** * The pivot point of the container that it rotates around. 
* @internal */ _pivot: ObservablePoint; /** * The origin point around which the container rotates and scales. * Unlike pivot, changing origin will not move the container's position. * @private */ _origin: ObservablePoint; /** * The skew amount, on the x and y axis. * @internal */ _skew: ObservablePoint; /** * The X-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. * @internal */ _cx: number; /** * The Y-coordinate value of the normalized local X axis, * the first column of the local transformation matrix without a scale. * @internal */ _sx: number; /** * The X-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. * @internal */ _cy: number; /** * The Y-coordinate value of the normalized local Y axis, * the second column of the local transformation matrix without a scale. * @internal */ _sy: number; /** * The rotation amount. * @internal */ private _rotation; /** @internal */ localColor: number; /** @internal */ localAlpha: number; /** @internal */ groupAlpha: number; /** @internal */ groupColor: number; /** @internal */ groupColorAlpha: number; /** @internal */ localBlendMode: BLEND_MODES; /** @internal */ groupBlendMode: BLEND_MODES; /** * This property holds three bits: culled, visible, renderable * the third bit represents culling (0 = culled, 1 = not culled) 0b100 * the second bit represents visibility (0 = not visible, 1 = visible) 0b010 * the first bit represents renderable (0 = not renderable, 1 = renderable) 0b001 * @internal */ localDisplayStatus: number; /** @internal */ globalDisplayStatus: number; /** @internal */ readonly renderPipeId: string; /** * An optional bounds area for this container. Setting this rectangle will stop the renderer * from recursively measuring the bounds of each children and instead use this single boundArea. * * > [!IMPORTANT] This is great for optimisation! 
If for example you have a * > 1000 spinning particles and you know they all sit within a specific bounds, * > then setting it will mean the renderer will not need to measure the * > 1000 children to find the bounds. Instead it will just use the bounds you set. * @example * ```ts * const container = new Container(); * container.boundsArea = new Rectangle(0, 0, 500, 500); * ``` */ boundsArea: Rectangle; /** * A value that increments each time the container is modified * eg children added, removed etc * @ignore */ _didContainerChangeTick: number; /** * A value that increments each time the container view is modified * eg texture swap, geometry change etc * @ignore */ _didViewChangeTick: number; /** @internal */ layerParentId: string; /** * We now use the _didContainerChangeTick and _didViewChangeTick to track changes * @deprecated since 8.2.6 * @ignore */ set _didChangeId(value: number); /** @ignore */ get _didChangeId(): number; /** * property that tracks if the container transform has changed * @ignore */ private _didLocalTransformChangeId; constructor(options?: ContainerOptions); /** * Adds one or more children to the container. * The children will be rendered as part of this container's display list. * @example * ```ts * // Add a single child * container.addChild(sprite); * * // Add multiple children * container.addChild(background, player, foreground); * * // Add with type checking * const sprite = container.addChild(new Sprite(texture)); * sprite.tint = 'red'; * ``` * @param children - The Container(s) to add to the container * @returns The first child that was added * @see {@link Container#removeChild} For removing children * @see {@link Container#addChildAt} For adding at specific index */ addChild(...children: U): U[0]; /** * Removes one or more children from the container. * When removing multiple children, events will be triggered for each child in sequence. 
* @example * ```ts * // Remove a single child * const removed = container.removeChild(sprite); * * // Remove multiple children * const bg = container.removeChild(background, player, userInterface); * * // Remove with type checking * const sprite = container.removeChild(childSprite); * sprite.texture = newTexture; * ``` * @param children - The Container(s) to remove * @returns The first child that was removed * @see {@link Container#addChild} For adding children * @see {@link Container#removeChildren} For removing multiple children */ removeChild(...children: U): U[0]; /** @ignore */ _onUpdate(point?: ObservablePoint): void; set isRenderGroup(value: boolean); /** * Returns true if this container is a render group. * This means that it will be rendered as a separate pass, with its own set of instructions * @advanced */ get isRenderGroup(): boolean; /** * Calling this enables a render group for this container. * This means it will be rendered as a separate set of instructions. * The transform of the container will also be handled on the GPU rather than the CPU. * @advanced */ enableRenderGroup(): void; /** * This will disable the render group for this container. * @advanced */ disableRenderGroup(): void; /** @ignore */ _updateIsSimple(): void; /** * Current transform of the object based on world (parent) factors. * * This matrix represents the absolute transformation in the scene graph. * @example * ```ts * // Get world position * const worldPos = container.worldTransform; * console.log(`World position: (${worldPos.tx}, ${worldPos.ty})`); * ``` * @readonly * @see {@link Container#localTransform} For local space transform */ get worldTransform(): Matrix; /** * The position of the container on the x axis relative to the local coordinates of the parent. 
* * An alias to position.x * @example * ```ts * // Basic position * container.x = 100; * ``` */ get x(): number; set x(value: number); /** * The position of the container on the y axis relative to the local coordinates of the parent. * * An alias to position.y * @example * ```ts * // Basic position * container.y = 200; * ``` */ get y(): number; set y(value: number); /** * The coordinate of the object relative to the local coordinates of the parent. * @example * ```ts * // Basic position setting * container.position.set(100, 200); * container.position.set(100); // Sets both x and y to 100 * // Using point data * container.position = { x: 50, y: 75 }; * ``` * @since 4.0.0 */ get position(): ObservablePoint; set position(value: PointData); /** * The rotation of the object in radians. * * > [!NOTE] 'rotation' and 'angle' have the same effect on a display object; * > rotation is in radians, angle is in degrees. * @example * ```ts * // Basic rotation * container.rotation = Math.PI / 4; // 45 degrees * * // Convert from degrees * const degrees = 45; * container.rotation = degrees * Math.PI / 180; * * // Rotate around center * container.pivot.set(container.width / 2, container.height / 2); * container.rotation = Math.PI; // 180 degrees * * // Rotate around center with origin * container.origin.set(container.width / 2, container.height / 2); * container.rotation = Math.PI; // 180 degrees * ``` */ get rotation(): number; set rotation(value: number); /** * The angle of the object in degrees. * * > [!NOTE] 'rotation' and 'angle' have the same effect on a display object; * > rotation is in radians, angle is in degrees. 
* @example * ```ts * // Basic angle rotation * sprite.angle = 45; // 45 degrees * * // Rotate around center * sprite.pivot.set(sprite.width / 2, sprite.height / 2); * sprite.angle = 180; // Half rotation * * // Rotate around center with origin * sprite.origin.set(sprite.width / 2, sprite.height / 2); * sprite.angle = 180; // Half rotation * * // Reset rotation * sprite.angle = 0; * ``` */ get angle(): number; set angle(value: number); /** * The center of rotation, scaling, and skewing for this display object in its local space. * The `position` is the projection of `pivot` in the parent's local space. * * By default, the pivot is the origin (0, 0). * @example * ```ts * // Rotate around center * container.pivot.set(container.width / 2, container.height / 2); * container.rotation = Math.PI; // Rotates around center * ``` * @since 4.0.0 */ get pivot(): ObservablePoint; set pivot(value: PointData | number); /** * The skew factor for the object in radians. Skewing is a transformation that distorts * the object by rotating it differently at each point, creating a non-uniform shape. * @example * ```ts * // Basic skewing * container.skew.set(0.5, 0); // Skew horizontally * container.skew.set(0, 0.5); // Skew vertically * * // Skew with point data * container.skew = { x: 0.3, y: 0.3 }; // Diagonal skew * * // Reset skew * container.skew.set(0, 0); * * // Animate skew * app.ticker.add(() => { * // Create wave effect * container.skew.x = Math.sin(Date.now() / 1000) * 0.3; * }); * * // Combine with rotation * container.rotation = Math.PI / 4; // 45 degrees * container.skew.set(0.2, 0.2); // Skew the rotated object * ``` * @since 4.0.0 * @type {ObservablePoint} Point-like object with x/y properties in radians * @default {x: 0, y: 0} */ get skew(): ObservablePoint; set skew(value: PointData); /** * The scale factors of this object along the local coordinate axes. * * The default scale is (1, 1). 
* @example * ```ts * // Basic scaling * container.scale.set(2, 2); // Scales to double size * container.scale.set(2); // Scales uniformly to double size * container.scale = 2; // Scales uniformly to double size * // Scale to a specific width and height * container.setSize(200, 100); // Sets width to 200 and height to 100 * ``` * @since 4.0.0 */ get scale(): ObservablePoint; set scale(value: PointData | number | string); /** * @experimental * The origin point around which the container rotates and scales without affecting its position. * Unlike pivot, changing the origin will not move the container's position. * @example * ```ts * // Rotate around center point * container.origin.set(container.width / 2, container.height / 2); * container.rotation = Math.PI; // Rotates around center * * // Reset origin * container.origin.set(0, 0); * ``` */ get origin(): ObservablePoint; set origin(value: PointData | number); /** * The width of the Container, setting this will actually modify the scale to achieve the value set. * > [!NOTE] Changing the width will adjust the scale.x property of the container while maintaining its aspect ratio. * > [!NOTE] If you want to set both width and height at the same time, use {@link Container#setSize} * as it is more optimized by not recalculating the local bounds twice. * @example * ```ts * // Basic width setting * container.width = 100; * // Optimized width setting * container.setSize(100, 100); * ``` */ get width(): number; set width(value: number); /** * The height of the Container, * > [!NOTE] Changing the height will adjust the scale.y property of the container while maintaining its aspect ratio. * > [!NOTE] If you want to set both width and height at the same time, use {@link Container#setSize} * as it is more optimized by not recalculating the local bounds twice. 
* @example * ```ts * // Basic height setting * container.height = 200; * // Optimized height setting * container.setSize(100, 200); * ``` */ get height(): number; set height(value: number); /** * Retrieves the size of the container as a [Size]{@link Size} object. * * This is faster than get the width and height separately. * @example * ```ts * // Basic size retrieval * const size = container.getSize(); * console.log(`Size: ${size.width}x${size.height}`); * * // Reuse existing size object * const reuseSize = { width: 0, height: 0 }; * container.getSize(reuseSize); * ``` * @param out - Optional object to store the size in. * @returns The size of the container. */ getSize(out?: Size): Size; /** * Sets the size of the container to the specified width and height. * This is more efficient than setting width and height separately as it only recalculates bounds once. * @example * ```ts * // Basic size setting * container.setSize(100, 200); * * // Set uniform size * container.setSize(100); // Sets both width and height to 100 * ``` * @param value - This can be either a number or a [Size]{@link Size} object. * @param height - The height to set. Defaults to the value of `width` if not provided. */ setSize(value: number | Optional, height?: number): void; /** Called when the skew or the rotation changes. */ private _updateSkew; /** * Updates the transform properties of the container. * Allows partial updates of transform properties for optimized manipulation. 
* @example * ```ts * // Basic transform update * container.updateTransform({ * x: 100, * y: 200, * rotation: Math.PI / 4 * }); * * // Scale and rotate around center * sprite.updateTransform({ * pivotX: sprite.width / 2, * pivotY: sprite.height / 2, * scaleX: 2, * scaleY: 2, * rotation: Math.PI * }); * * // Update position only * button.updateTransform({ * x: button.x + 10, // Move right * y: button.y // Keep same y * }); * ``` * @param opts - Transform options to update * @param opts.x - The x position * @param opts.y - The y position * @param opts.scaleX - The x-axis scale factor * @param opts.scaleY - The y-axis scale factor * @param opts.rotation - The rotation in radians * @param opts.skewX - The x-axis skew factor * @param opts.skewY - The y-axis skew factor * @param opts.pivotX - The x-axis pivot point * @param opts.pivotY - The y-axis pivot point * @returns This container, for chaining * @see {@link Container#setFromMatrix} For matrix-based transforms * @see {@link Container#position} For direct position access */ updateTransform(opts: Partial): this; /** * Updates the local transform properties by decomposing the given matrix. * Extracts position, scale, rotation, and skew from a transformation matrix. * @example * ```ts * // Basic matrix transform * const matrix = new Matrix() * .translate(100, 100) * .rotate(Math.PI / 4) * .scale(2, 2); * * container.setFromMatrix(matrix); * * // Copy transform from another container * const source = new Container(); * source.position.set(100, 100); * source.rotation = Math.PI / 2; * * target.setFromMatrix(source.localTransform); * * // Reset transform * container.setFromMatrix(Matrix.IDENTITY); * ``` * @param matrix - The matrix to use for updating the transform * @see {@link Container#updateTransform} For property-based updates * @see {@link Matrix#decompose} For matrix decomposition details */ setFromMatrix(matrix: Matrix): void; /** Updates the local transform. 
*/ updateLocalTransform(): void; set alpha(value: number); /** * The opacity of the object relative to its parent's opacity. * Value ranges from 0 (fully transparent) to 1 (fully opaque). * @example * ```ts * // Basic transparency * sprite.alpha = 0.5; // 50% opacity * * // Inherited opacity * container.alpha = 0.5; * const child = new Sprite(texture); * child.alpha = 0.5; * container.addChild(child); * // child's effective opacity is 0.25 (0.5 * 0.5) * ``` * @default 1 * @see {@link Container#visible} For toggling visibility * @see {@link Container#renderable} For render control */ get alpha(): number; set tint(value: ColorSource); /** * The tint applied to the sprite. * * This can be any valid {@link ColorSource}. * @example * ```ts * // Basic color tinting * container.tint = 0xff0000; // Red tint * container.tint = 'red'; // Same as above * container.tint = '#00ff00'; // Green * container.tint = 'rgb(0,0,255)'; // Blue * * // Remove tint * container.tint = 0xffffff; // White = no tint * container.tint = null; // Also removes tint * ``` * @default 0xFFFFFF * @see {@link Container#alpha} For transparency * @see {@link Container#visible} For visibility control */ get tint(): number; set blendMode(value: BLEND_MODES); /** * The blend mode to be applied to the sprite. Controls how pixels are blended when rendering. * * Setting to 'normal' will reset to default blending. * > [!NOTE] More blend modes are available after importing the `pixi.js/advanced-blend-modes` sub-export. * @example * ```ts * // Basic blend modes * sprite.blendMode = 'add'; // Additive blending * sprite.blendMode = 'multiply'; // Multiply colors * sprite.blendMode = 'screen'; // Screen blend * * // Reset blend mode * sprite.blendMode = 'normal'; // Normal blending * ``` * @default 'normal' * @see {@link Container#alpha} For transparency * @see {@link Container#tint} For color adjustments */ get blendMode(): BLEND_MODES; /** * The visibility of the object. 
If false the object will not be drawn, * and the transform will not be updated. * @example * ```ts * // Basic visibility toggle * sprite.visible = false; // Hide sprite * sprite.visible = true; // Show sprite * ``` * @default true * @see {@link Container#renderable} For render-only control * @see {@link Container#alpha} For transparency */ get visible(): boolean; set visible(value: boolean); /** @ignore */ get culled(): boolean; /** @ignore */ set culled(value: boolean); /** * Controls whether this object can be rendered. If false the object will not be drawn, * but the transform will still be updated. This is different from visible, which skips * transform updates. * @example * ```ts * // Basic render control * sprite.renderable = false; // Skip rendering * sprite.renderable = true; // Enable rendering * ``` * @default true * @see {@link Container#visible} For skipping transform updates * @see {@link Container#alpha} For transparency */ get renderable(): boolean; set renderable(value: boolean); /** * Whether or not the object should be rendered. * @advanced */ get isRenderable(): boolean; /** * Removes all internal references and listeners as well as removes children from the display list. * Do not use a Container after calling `destroy`. * @param options - Options parameter. A boolean will act as if all options * have been set to that value * @example * ```ts * container.destroy(); * container.destroy(true); * container.destroy({ children: true }); * container.destroy({ children: true, texture: true, textureSource: true }); * ``` */ destroy(options?: DestroyOptions): void; } /** * The type of the pointer event to listen for. 
* @category accessibility * @standard * @see https://developer.mozilla.org/en-US/docs/Web/CSS/pointer-events */ export type PointerEvents = "auto" | "none" | "visiblePainted" | "visibleFill" | "visibleStroke" | "visible" | "painted" | "fill" | "stroke" | "all" | "inherit"; /** * When `accessible` is enabled on any display object, these properties will affect its accessibility. * @example * const container = new Container(); * container.accessible = true; * container.accessibleTitle = 'My Container'; * container.accessibleHint = 'This is a container'; * container.tabIndex = 0; * @category accessibility * @standard */ export interface AccessibleOptions { /** * Flag for if the object is accessible. If true AccessibilityManager will overlay a * shadow div with attributes set * @default false * @example * ```js * const container = new Container(); * container.accessible = true; * ``` */ accessible: boolean; /** * Sets the title attribute of the shadow div * If accessibleTitle AND accessibleHint has not been this will default to 'container [tabIndex]' * @type {string} * @default null * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessibleTitle = 'My Container'; * ``` */ accessibleTitle: string | null; /** * Sets the aria-label attribute of the shadow div * @default null * @advanced * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessibleHint = 'This is a container'; * ``` */ accessibleHint: string | null; /** * Sets the tabIndex of the shadow div. You can use this to set the order of the * elements when using the tab key to navigate. * @default 0 * @example * ```js * const container = new Container(); * container.accessible = true; * container.tabIndex = 0; * * const sprite = new Sprite(texture); * sprite.accessible = true; * sprite.tabIndex = 1; * ``` */ tabIndex: number; /** * Specify the type of div the accessible layer is. 
Screen readers treat the element differently * depending on this type. Defaults to button. * @default 'button' * @type {string} * @advanced * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessibleType = 'button'; // or 'link', 'checkbox', etc. * ``` */ accessibleType: keyof HTMLElementTagNameMap; /** * Specify the pointer-events the accessible div will use * Defaults to auto. * @default 'auto' * @type {PointerEvents} * @advanced * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessiblePointerEvents = 'none'; // or 'auto', 'visiblePainted', etc. * ``` */ accessiblePointerEvents: PointerEvents; /** * Sets the text content of the shadow * @default null * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessibleText = 'This is a container'; * ``` */ accessibleText: string | null; /** * Setting to false will prevent any children inside this container to * be accessible. Defaults to true. * @default true * @example * ```js * const container = new Container(); * container.accessible = true; * container.accessibleChildren = false; // This will prevent any children from being accessible * * const sprite = new Sprite(texture); * sprite.accessible = true; // This will not work since accessibleChildren is false * ``` */ accessibleChildren: boolean; } /** * The Accessibility object is attached to the {@link Container}. * @private */ export interface AccessibleTarget extends AccessibleOptions { /** @private */ _accessibleActive: boolean; /** @private */ _accessibleDiv: AccessibleHTMLElement | null; /** @private */ _renderId: number; } /** @internal */ export interface AccessibleHTMLElement extends HTMLElement { type?: string; container?: Container; } /** * Default property values of accessible objects * used by {@link AccessibilitySystem}. 
* @internal * @example * import { accessibleTarget } from 'pixi.js'; * * function MyObject() {} * Object.assign(MyObject.prototype, accessibleTarget); */ export declare const accessibilityTarget: AccessibleTarget; /** * The result of the mobile device detection system. * Provides detailed information about device type and platform. * @example * ```ts * // Type usage with isMobile * const deviceInfo: isMobileResult = isMobile; * * // Check device categories * if (deviceInfo.apple.device) { * console.log('iOS Device Details:', { * isPhone: deviceInfo.apple.phone, * isTablet: deviceInfo.apple.tablet, * isUniversal: deviceInfo.apple.universal * }); * } * * // Platform-specific checks * const platformInfo = { * isApple: deviceInfo.apple.device, * isAndroid: deviceInfo.android.device, * isAmazon: deviceInfo.amazon.device, * isWindows: deviceInfo.windows.device * }; * ``` * @category utils * @standard */ export type isMobileResult = { /** * Apple device detection information. * Provides detailed iOS device categorization. * @example * ```ts * // iOS device checks * if (isMobile.apple.device) { * if (isMobile.apple.tablet) { * // iPad-specific code * useTabletLayout(); * } else if (isMobile.apple.phone) { * // iPhone-specific code * usePhoneLayout(); * } * } * ``` */ apple: { /** Whether the device is an iPhone */ phone: boolean; /** Whether the device is an iPod Touch */ ipod: boolean; /** Whether the device is an iPad */ tablet: boolean; /** Whether app is running in iOS universal mode */ universal: boolean; /** Whether device is any Apple mobile device */ device: boolean; }; /** * Amazon device detection information. * Identifies Amazon Fire tablets and phones. 
* @example * ```ts * // Amazon Fire tablet detection * if (isMobile.amazon.tablet) { * // Fire tablet optimizations * optimizeForFireTablet(); * } * ``` */ amazon: { /** Whether device is a Fire Phone */ phone: boolean; /** Whether device is a Fire Tablet */ tablet: boolean; /** Whether device is any Amazon mobile device */ device: boolean; }; /** * Android device detection information. * Categorizes Android phones and tablets. * @example * ```ts * // Android device handling * if (isMobile.android.device) { * // Check specific type * const deviceType = isMobile.android.tablet ? * 'tablet' : 'phone'; * console.log(`Android ${deviceType} detected`); * } * ``` */ android: { /** Whether device is an Android phone */ phone: boolean; /** Whether device is an Android tablet */ tablet: boolean; /** Whether device is any Android device */ device: boolean; }; /** * Windows device detection information. * Identifies Windows phones and tablets. * @example * ```ts * // Windows device checks * if (isMobile.windows.tablet) { * // Surface tablet optimizations * enableTouchFeatures(); * } * ``` */ windows: { /** Whether device is a Windows Phone */ phone: boolean; /** Whether device is a Windows tablet */ tablet: boolean; /** Whether device is any Windows mobile device */ device: boolean; }; /** * Other device detection information. * Covers additional platforms and browsers. 
* @example * ```ts * // Check other platforms * if (isMobile.other.blackberry10) { * // BlackBerry 10 specific code * } else if (isMobile.other.chrome) { * // Chrome mobile specific code * } * ``` */ other: { /** Whether device is a BlackBerry */ blackberry: boolean; /** Whether device is a BlackBerry 10 */ blackberry10: boolean; /** Whether browser is Opera Mobile */ opera: boolean; /** Whether browser is Firefox Mobile */ firefox: boolean; /** Whether browser is Chrome Mobile */ chrome: boolean; /** Whether device is any other mobile device */ device: boolean; }; /** * Whether the device is any type of phone. * Combines detection across all platforms. * @example * ```ts * // Check if device is a phone * if (isMobile.phone) { * console.log('Running on a mobile phone'); * } * ``` */ phone: boolean; /** * Whether the device is any type of tablet. * Combines detection across all platforms. * @example * ```ts * // Check if device is a tablet * if (isMobile.tablet) { * console.log('Running on a mobile tablet'); * } * ``` */ tablet: boolean; /** * Whether the device is any type of mobile device. * True if any mobile platform is detected. * @example * ```ts * // Check if device is mobile * if (isMobile.any) { * console.log('Running on a mobile device'); * } * ``` */ any: boolean; }; /** * Detects whether the device is mobile and what type of mobile device it is. * Provides a comprehensive detection system for mobile platforms and devices. 
* @example * ```ts * import { isMobile } from 'pixi.js'; * * // Check specific device types * if (isMobile.apple.tablet) { * console.log('Running on iPad'); * } * * // Check platform categories * if (isMobile.android.any) { * console.log('Running on Android'); * } * * // Conditional rendering * if (isMobile.phone) { * renderer.resolution = 2; * view.style.width = '100vw'; * } * ``` * @remarks * - Detects all major mobile platforms * - Distinguishes between phones and tablets * - Updates when navigator changes * - Common in responsive design * @category utils * @standard * @see {@link isMobileResult} For full type definition */ export declare const isMobile: isMobileResult; /** * Initialisation options for the accessibility system when used with an Application. * @category accessibility * @advanced */ export interface AccessibilitySystemOptions { /** Options for the accessibility system */ accessibilityOptions?: AccessibilityOptions; } /** * The options for the accessibility system. * @category accessibility * @advanced */ export interface AccessibilityOptions { /** Whether to enable accessibility features on initialization instead of waiting for tab key */ enabledByDefault?: boolean; /** Whether to visually show the accessibility divs for debugging */ debug?: boolean; /** Whether to allow tab key press to activate accessibility features */ activateOnTab?: boolean; /** Whether to deactivate accessibility when mouse moves */ deactivateOnMouseMove?: boolean; } /** * The Accessibility system provides screen reader and keyboard navigation support for PixiJS content. * It creates an accessible DOM layer over the canvas that can be controlled programmatically or through user interaction. * * By default, the system activates when users press the tab key. 
This behavior can be customized through options: * ```js * const app = new Application({ * accessibilityOptions: { * // Enable immediately instead of waiting for tab * enabledByDefault: true, * // Disable tab key activation * activateOnTab: false, * // Show/hide accessibility divs * debug: false, * // Prevent accessibility from being deactivated when mouse moves * deactivateOnMouseMove: false, * } * }); * ``` * * The system can also be controlled programmatically by accessing the `renderer.accessibility` property: * ```js * app.renderer.accessibility.setAccessibilityEnabled(true); * ``` * * To make individual containers accessible: * ```js * container.accessible = true; * ``` * There are several properties that can be set on a Container to control its accessibility which can * be found here: {@link AccessibleOptions}. * @category accessibility * @standard */ export declare class AccessibilitySystem implements System { private readonly _mobileInfo; /** @ignore */ static extension: { readonly type: readonly [ ExtensionType.WebGLSystem, ExtensionType.WebGPUSystem ]; readonly name: "accessibility"; }; /** * The default options used by the system. * You can set these before initializing the {@link Application} to change the default behavior. * @example * ```js * import { AccessibilitySystem } from 'pixi.js'; * * AccessibilitySystem.defaultOptions.enabledByDefault = true; * * const app = new Application() * app.init() * ``` */ static defaultOptions: AccessibilityOptions; /** Whether accessibility divs are visible for debugging */ debug: boolean; /** Whether to activate on tab key press */ private _activateOnTab; /** Whether to deactivate accessibility when mouse moves */ private _deactivateOnMouseMove; /** * The renderer this accessibility manager works for. * @type {WebGLRenderer|WebGPURenderer} */ private _renderer; /** Internal variable, see isActive getter. */ private _isActive; /** Internal variable, see isMobileAccessibility getter. 
*/ private _isMobileAccessibility; /** Button element for handling touch hooks. */ private _hookDiv; /** This is the dom element that will sit over the PixiJS element. This is where the div overlays will go. */ private _div; /** A simple pool for storing divs. */ private _pool; /** This is a tick used to check if an object is no longer being rendered. */ private _renderId; /** The array of currently active accessible items. */ private _children; /** Count to throttle div updates on android devices. */ private _androidUpdateCount; /** The frequency to update the div elements. */ private readonly _androidUpdateFrequency; private _canvasObserver; /** * @param {WebGLRenderer|WebGPURenderer} renderer - A reference to the current renderer */ constructor(renderer: Renderer, _mobileInfo?: isMobileResult); /** * Value of `true` if accessibility is currently active and accessibility layers are showing. * @type {boolean} * @readonly */ get isActive(): boolean; /** * Value of `true` if accessibility is enabled for touch devices. * @type {boolean} * @readonly */ get isMobileAccessibility(): boolean; /** * The DOM element that will sit over the PixiJS element. This is where the div overlays will go. * @readonly */ get hookDiv(): HTMLElement; /** * Creates the touch hooks. * @private */ private _createTouchHook; /** * Destroys the touch hooks. * @private */ private _destroyTouchHook; /** * Activating will cause the Accessibility layer to be shown. * This is called when a user presses the tab key. * @private */ private _activate; private _initAccessibilitySetup; /** * Deactivates the accessibility system. Removes listeners and accessibility elements. * @private */ private _deactivate; /** * This recursive function will run through the scene graph and add any new accessible objects to the DOM layer. * @private * @param {Container} container - The Container to check. */ private _updateAccessibleObjects; /** * Runner init called, view is available at this point. 
* @ignore */ init(options?: AccessibilitySystemOptions): void; /** * Updates the accessibility layer during rendering. * - Removes divs for containers no longer in the scene * - Updates the position and dimensions of the root div * - Updates positions of active accessibility divs * Only fires while the accessibility system is active. * @ignore */ postrender(): void; /** * private function that will visually add the information to the * accessibility div * @param {HTMLElement} div - */ private _updateDebugHTML; /** * Adjust the hit area based on the bounds of a display object * @param {Rectangle} hitArea - Bounds of the child */ private _capHitArea; /** * Creates or reuses a div element for a Container and adds it to the accessibility layer. * Sets up ARIA attributes, event listeners, and positioning based on the container's properties. * @private * @param {Container} container - The child to make accessible. */ private _addChild; /** * Dispatch events with the EventSystem. * @param e * @param type * @private */ private _dispatchEvent; /** * Maps the div button press to pixi's EventSystem (click) * @private * @param {MouseEvent} e - The click event. */ private _onClick; /** * Maps the div focus events to pixi's EventSystem (mouseover) * @private * @param {FocusEvent} e - The focus event. */ private _onFocus; /** * Maps the div focus events to pixi's EventSystem (mouseout) * @private * @param {FocusEvent} e - The focusout event. */ private _onFocusOut; /** * Is called when a key is pressed * @private * @param {KeyboardEvent} e - The keydown event. */ private _onKeyDown; /** * Is called when the mouse moves across the renderer element * @private * @param {MouseEvent} e - The mouse event. */ private _onMouseMove; /** * Destroys the accessibility system. Removes all elements and listeners. * > [!IMPORTANT] This is typically called automatically when the {@link Application} is destroyed. * > A typically user should not need to call this method directly. 
*/ destroy(): void; /** * Enables or disables the accessibility system. * @param enabled - Whether to enable or disable accessibility. * @example * ```js * app.renderer.accessibility.setAccessibilityEnabled(true); // Enable accessibility * app.renderer.accessibility.setAccessibilityEnabled(false); // Disable accessibility * ``` */ setAccessibilityEnabled(enabled: boolean): void; } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface Container extends Partial { } // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface ContainerOptions extends Partial { } interface RendererSystems { accessibility: AccessibilitySystem; } } } /** * A callback which can be added to a ticker. * ```js * ticker.add(() => { * // do something every frame * }); * ``` * @category ticker * @standard */ export type TickerCallback = (this: T, ticker: Ticker) => any; /** * A Ticker class that runs an update loop that other objects listen to. * Used for managing animation frames and timing in a PixiJS application. * * It provides a way to add listeners that will be called on each frame, * allowing for smooth animations and updates. * * Animation frames are requested * only when necessary, e.g., when the ticker is started and the emitter has listeners. 
* @example * ```ts * // Basic ticker usage * const ticker = new Ticker(); * ticker.add((ticker) => { * // Update every frame * sprite.rotation += 0.1 * ticker.deltaTime; * }); * ticker.start(); * * // Control update priority * ticker.add( * (ticker) => { * // High priority updates run first * physics.update(ticker.deltaTime); * }, * undefined, * UPDATE_PRIORITY.HIGH * ); * * // One-time updates * ticker.addOnce(() => { * console.log('Runs on next frame only'); * }); * ``` * @see {@link TickerPlugin} For use with Application * @see {@link UPDATE_PRIORITY} For priority constants * @see {@link TickerCallback} For listener function type * @category ticker * @standard */ export declare class Ticker { /** * Target frame rate in frames per millisecond. * Used for converting deltaTime to a scalar time delta. * @example * ```ts * // Default is 0.06 (60 FPS) * console.log(Ticker.targetFPMS); // 0.06 * * // Calculate target frame duration * const frameDuration = 1 / Ticker.targetFPMS; // ≈ 16.67ms * * // Use in custom timing calculations * const deltaTime = elapsedMS * Ticker.targetFPMS; * ``` * @remarks * - Default is 0.06 (equivalent to 60 FPS) * - Used in deltaTime calculations * - Affects all ticker instances * @default 0.06 * @see {@link Ticker#deltaTime} For time scaling * @see {@link Ticker#FPS} For actual frame rate */ static targetFPMS: number; /** The private shared ticker instance */ private static _shared; /** The private system ticker instance */ private static _system; /** * Whether or not this ticker should invoke the method {@link Ticker#start|start} * automatically when a listener is added. 
* @example * ```ts * // Default behavior (manual start) * const ticker = new Ticker(); * ticker.autoStart = false; * ticker.add(() => { * // Won't run until ticker.start() is called * }); * * // Auto-start behavior * const autoTicker = new Ticker(); * autoTicker.autoStart = true; * autoTicker.add(() => { * // Runs immediately when added * }); * ``` * @default false * @see {@link Ticker#start} For manually starting the ticker * @see {@link Ticker#stop} For manually stopping the ticker */ autoStart: boolean; /** * Scalar time value from last frame to this frame. * Used for frame-based animations and updates. * * This value is capped by setting {@link Ticker#minFPS|minFPS} * and is scaled with {@link Ticker#speed|speed}. * > [!NOTE] The cap may be exceeded by scaling. * @example * ```ts * // Basic animation * ticker.add((ticker) => { * // Rotate sprite by 0.1 radians per frame, scaled by deltaTime * sprite.rotation += 0.1 * ticker.deltaTime; * }); * ``` */ deltaTime: number; /** * Scalar time elapsed in milliseconds from last frame to this frame. * Provides precise timing for animations and updates. * * This value is capped by setting {@link Ticker#minFPS|minFPS} * and is scaled with {@link Ticker#speed|speed}. * * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. * * Defaults to target frame time * * > [!NOTE] The cap may be exceeded by scaling. * @example * ```ts * // Animation timing * ticker.add((ticker) => { * // Use millisecond timing for precise animations * const progress = (ticker.deltaMS / animationDuration); * sprite.alpha = Math.min(1, progress); * }); * ``` * @default 16.66 */ deltaMS: number; /** * Time elapsed in milliseconds from last frame to this frame. * Provides raw timing information without modifications. * * Opposed to what the scalar {@link Ticker#deltaTime|deltaTime} * is based, this value is neither capped nor scaled. 
* * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. * * Defaults to target frame time * @example * ```ts * // Basic timing information * ticker.add((ticker) => { * console.log(`Raw frame time: ${ticker.elapsedMS}ms`); * }); * ``` * @default 16.66 */ elapsedMS: number; /** * The last time {@link Ticker#update|update} was invoked. * Used for calculating time deltas between frames. * * This value is also reset internally outside of invoking * update, but only when a new animation frame is requested. * * If the platform supports DOMHighResTimeStamp, * this value will have a precision of 1 µs. * @example * ```ts * // Basic timing check * ticker.add(() => { * const timeSinceStart = performance.now() - ticker.lastTime; * console.log(`Time running: ${timeSinceStart}ms`); * }); * ``` */ lastTime: number; /** * Factor of current {@link Ticker#deltaTime|deltaTime}. * Used to scale time for slow motion or fast-forward effects. * @example * ```ts * // Basic speed adjustment * ticker.speed = 0.5; // Half speed (slow motion) * ticker.speed = 2.0; // Double speed (fast forward) * * // Temporary speed changes * function slowMotion() { * const normalSpeed = ticker.speed; * ticker.speed = 0.2; * setTimeout(() => { * ticker.speed = normalSpeed; * }, 1000); * } * ``` */ speed: number; /** * Whether or not this ticker has been started. * * `true` if {@link Ticker#start|start} has been called. * `false` if {@link Ticker#stop|Stop} has been called. * * While `false`, this value may change to `true` in the * event of {@link Ticker#autoStart|autoStart} being `true` * and a listener is added. * @example * ```ts * // Check ticker state * const ticker = new Ticker(); * console.log(ticker.started); // false * * // Start and verify * ticker.start(); * console.log(ticker.started); // true * ``` */ started: boolean; /** The first listener. All new listeners added are chained on this. 
*/ private _head; /** Internal current frame request ID */ private _requestId; /** * Internal value managed by minFPS property setter and getter. * This is the maximum allowed milliseconds between updates. */ private _maxElapsedMS; /** * Internal value managed by minFPS property setter and getter. * This is the minimum allowed milliseconds between updates. */ private _minElapsedMS; /** If enabled, deleting is disabled.*/ private _protected; /** The last time keyframe was executed. Maintains a relatively fixed interval with the previous value. */ private _lastFrame; /** * Internal tick method bound to ticker instance. * This is because in early 2015, Function.bind * is still 60% slower in high performance scenarios. * Also separating frame requests from update method * so listeners may be called at any time and with * any animation API, just invoke ticker.update(time). * @param time - Time since last tick. */ private readonly _tick; constructor(); /** * Conditionally requests a new animation frame. * If a frame has not already been requested, and if the internal * emitter has listeners, a new frame is requested. */ private _requestIfNeeded; /** Conditionally cancels a pending animation frame. */ private _cancelIfNeeded; /** * Conditionally requests a new animation frame. * If the ticker has been started it checks if a frame has not already * been requested, and if the internal emitter has listeners. If these * conditions are met, a new frame is requested. If the ticker has not * been started, but autoStart is `true`, then the ticker starts now, * and continues with the previous conditions to request a new frame. */ private _startIfPossible; /** * Register a handler for tick events. Calls continuously unless * it is removed or the ticker is stopped. 
* @example * ```ts * // Basic update handler * ticker.add((ticker) => { * // Update every frame * sprite.rotation += 0.1 * ticker.deltaTime; * }); * * // With specific context * const game = { * update(ticker) { * this.physics.update(ticker.deltaTime); * } * }; * ticker.add(game.update, game); * * // With priority * ticker.add( * (ticker) => { * // Runs before normal priority updates * physics.update(ticker.deltaTime); * }, * undefined, * UPDATE_PRIORITY.HIGH * ); * ``` * @param fn - The listener function to be added for updates * @param context - The listener context * @param priority - The priority for emitting (default: UPDATE_PRIORITY.NORMAL) * @returns This instance of a ticker * @see {@link Ticker#addOnce} For one-time handlers * @see {@link Ticker#remove} For removing handlers */ add(fn: TickerCallback, context?: T, priority?: number): this; /** * Add a handler for the tick event which is only executed once on the next frame. * @example * ```ts * // Basic one-time update * ticker.addOnce(() => { * console.log('Runs next frame only'); * }); * * // With specific context * const game = { * init(ticker) { * this.loadResources(); * console.log('Game initialized'); * } * }; * ticker.addOnce(game.init, game); * * // With priority * ticker.addOnce( * () => { * // High priority one-time setup * physics.init(); * }, * undefined, * UPDATE_PRIORITY.HIGH * ); * ``` * @param fn - The listener function to be added for one update * @param context - The listener context * @param priority - The priority for emitting (default: UPDATE_PRIORITY.NORMAL) * @returns This instance of a ticker * @see {@link Ticker#add} For continuous updates * @see {@link Ticker#remove} For removing handlers */ addOnce(fn: TickerCallback, context?: T, priority?: number): this; /** * Internally adds the event handler so that it can be sorted by priority. * Priority allows certain handler (user, AnimatedSprite, Interaction) to be run * before the rendering. 
* @private * @param listener - Current listener being added. * @returns This instance of a ticker */ private _addListener; /** * Removes any handlers matching the function and context parameters. * If no handlers are left after removing, then it cancels the animation frame. * @example * ```ts * // Basic removal * const onTick = () => { * sprite.rotation += 0.1; * }; * ticker.add(onTick); * ticker.remove(onTick); * * // Remove with context * const game = { * update(ticker) { * this.physics.update(ticker.deltaTime); * } * }; * ticker.add(game.update, game); * ticker.remove(game.update, game); * * // Remove all matching handlers * // (if same function was added multiple times) * ticker.add(onTick); * ticker.add(onTick); * ticker.remove(onTick); // Removes all instances * ``` * @param fn - The listener function to be removed * @param context - The listener context to be removed * @returns This instance of a ticker * @see {@link Ticker#add} For adding handlers * @see {@link Ticker#addOnce} For one-time handlers */ remove(fn: TickerCallback, context?: T): this; /** * The number of listeners on this ticker, calculated by walking through linked list. * @example * ```ts * // Check number of active listeners * const ticker = new Ticker(); * console.log(ticker.count); // 0 * * // Add some listeners * ticker.add(() => {}); * ticker.add(() => {}); * console.log(ticker.count); // 2 * * // Check after cleanup * ticker.destroy(); * console.log(ticker.count); // 0 * ``` * @readonly * @see {@link Ticker#add} For adding listeners * @see {@link Ticker#remove} For removing listeners */ get count(): number; /** * Starts the ticker. If the ticker has listeners a new animation frame is requested at this point. 
* @example * ```ts * // Basic manual start * const ticker = new Ticker(); * ticker.add(() => { * // Animation code here * }); * ticker.start(); * ``` * @see {@link Ticker#stop} For stopping the ticker * @see {@link Ticker#autoStart} For automatic starting * @see {@link Ticker#started} For checking ticker state */ start(): void; /** * Stops the ticker. If the ticker has requested an animation frame it is canceled at this point. * @example * ```ts * // Basic stop * const ticker = new Ticker(); * ticker.stop(); * ``` * @see {@link Ticker#start} For starting the ticker * @see {@link Ticker#started} For checking ticker state * @see {@link Ticker#destroy} For cleaning up the ticker */ stop(): void; /** * Destroy the ticker and don't use after this. Calling this method removes all references to internal events. * @example * ```ts * // Clean up with active listeners * const ticker = new Ticker(); * ticker.add(() => {}); * ticker.destroy(); // Removes all listeners * ``` * @see {@link Ticker#stop} For stopping without destroying * @see {@link Ticker#remove} For removing specific listeners */ destroy(): void; /** * Triggers an update. * * An update entails setting the * current {@link Ticker#elapsedMS|elapsedMS}, * the current {@link Ticker#deltaTime|deltaTime}, * invoking all listeners with current deltaTime, * and then finally setting {@link Ticker#lastTime|lastTime} * with the value of currentTime that was provided. * * This method will be called automatically by animation * frame callbacks if the ticker instance has been started * and listeners are added. * @example * ```ts * // Basic manual update * const ticker = new Ticker(); * ticker.update(performance.now()); * ``` * @param currentTime - The current time of execution (defaults to performance.now()) * @see {@link Ticker#deltaTime} For frame delta value * @see {@link Ticker#elapsedMS} For raw elapsed time */ update(currentTime?: number): void; /** * The frames per second at which this ticker is running. 
* The default is approximately 60 in most modern browsers. * > [!NOTE] This does not factor in the value of * > {@link Ticker#speed|speed}, which is specific * > to scaling {@link Ticker#deltaTime|deltaTime}. * @example * ```ts * // Basic FPS monitoring * ticker.add(() => { * console.log(`Current FPS: ${Math.round(ticker.FPS)}`); * }); * ``` * @readonly */ get FPS(): number; /** * Manages the maximum amount of milliseconds allowed to * elapse between invoking {@link Ticker#update|update}. * * This value is used to cap {@link Ticker#deltaTime|deltaTime}, * but does not effect the measured value of {@link Ticker#FPS|FPS}. * * When setting this property it is clamped to a value between * `0` and `Ticker.targetFPMS * 1000`. * @example * ```ts * // Set minimum acceptable frame rate * const ticker = new Ticker(); * ticker.minFPS = 30; // Never go below 30 FPS * * // Use with maxFPS for frame rate clamping * ticker.minFPS = 30; * ticker.maxFPS = 60; * * // Monitor delta capping * ticker.add(() => { * // Delta time will be capped based on minFPS * console.log(`Delta time: ${ticker.deltaTime}`); * }); * ``` * @default 10 */ get minFPS(): number; set minFPS(fps: number); /** * Manages the minimum amount of milliseconds required to * elapse between invoking {@link Ticker#update|update}. * * This will effect the measured value of {@link Ticker#FPS|FPS}. * * If it is set to `0`, then there is no limit; PixiJS will render as many frames as it can. 
* Otherwise it will be at least `minFPS` * @example * ```ts * // Set minimum acceptable frame rate * const ticker = new Ticker(); * ticker.maxFPS = 60; // Never go above 60 FPS * * // Use with maxFPS for frame rate clamping * ticker.minFPS = 30; * ticker.maxFPS = 60; * * // Monitor delta capping * ticker.add(() => { * // Delta time will be capped based on maxFPS * console.log(`Delta time: ${ticker.deltaTime}`); * }); * ``` * @default 0 */ get maxFPS(): number; set maxFPS(fps: number); /** * The shared ticker instance used by {@link AnimatedSprite} and by * {@link VideoSource} to update animation frames / video textures. * * It may also be used by {@link Application} if created with the `sharedTicker` option property set to true. * * The property {@link Ticker#autoStart|autoStart} is set to `true` for this instance. * Please follow the examples for usage, including how to opt-out of auto-starting the shared ticker. * @example * import { Ticker } from 'pixi.js'; * * const ticker = Ticker.shared; * // Set this to prevent starting this ticker when listeners are added. * // By default this is true only for the Ticker.shared instance. * ticker.autoStart = false; * * // FYI, call this to ensure the ticker is stopped. It should be stopped * // if you have not attempted to render anything yet. * ticker.stop(); * * // Call this when you are ready for a running shared ticker. * ticker.start(); * @example * import { autoDetectRenderer, Container } from 'pixi.js'; * * // You may use the shared ticker to render... * const renderer = autoDetectRenderer(); * const stage = new Container(); * document.body.appendChild(renderer.view); * ticker.add((time) => renderer.render(stage)); * * // Or you can just update it manually. 
* ticker.autoStart = false; * ticker.stop(); * const animate = (time) => { * ticker.update(time); * renderer.render(stage); * requestAnimationFrame(animate); * }; * animate(performance.now()); * @type {Ticker} * @readonly */ static get shared(): Ticker; /** * The system ticker instance used by {@link PrepareBase} for core timing * functionality that shouldn't usually need to be paused, unlike the `shared` * ticker which drives visual animations and rendering which may want to be paused. * * The property {@link Ticker#autoStart|autoStart} is set to `true` for this instance. * @type {Ticker} * @readonly * @advanced */ static get system(): Ticker; } type ResizeableRenderer = Pick; /** * Application options for the {@link ResizePlugin}. * These options control how your application handles window and element resizing. * @example * ```ts * // Auto-resize to window * await app.init({ resizeTo: window }); * * // Auto-resize to container element * await app.init({ resizeTo: document.querySelector('#game') }); * ``` * @category app * @standard */ export interface ResizePluginOptions { /** * Element to automatically resize the renderer to. * @example * ```ts * const app = new Application(); * await app.init({ * resizeTo: window, // Resize to the entire window * // or * resizeTo: document.querySelector('#game-container'), // Resize to a specific element * // or * resizeTo: null, // Disable auto-resize * }); * ``` * @default null */ resizeTo?: Window | HTMLElement; } /** * Middleware for Application's resize functionality. This plugin handles automatic * and manual resizing of your PixiJS application. 
 *
 * Adds the following features to {@link Application}:
 * - `resizeTo`: Set an element to automatically resize to
 * - `resize`: Manually trigger a resize
 * - `queueResize`: Queue a resize for the next animation frame
 * - `cancelResize`: Cancel a queued resize
 * @example
 * ```ts
 * import { Application, ResizePlugin } from 'pixi.js';
 *
 * // Create application
 * const app = new Application();
 *
 * // Example 1: Auto-resize to window
 * await app.init({ resizeTo: window });
 *
 * // Example 2: Auto-resize to specific element
 * const container = document.querySelector('#game-container');
 * await app.init({ resizeTo: container });
 *
 * // Example 3: Change resize target at runtime
 * app.resizeTo = window; // Enable auto-resize to window
 * app.resizeTo = null; // Disable auto-resize
 * ```
 * @category app
 * @standard
 */
export declare class ResizePlugin {
    /** @ignore */
    static extension: ExtensionMetadata;
    /** @internal */
    static resizeTo: Window | HTMLElement;
    /** @internal */
    static resize: () => void;
    /** @internal */
    static renderer: ResizeableRenderer;
    /** @internal */
    static queueResize: () => void;
    /** @internal */
    static render: () => void;
    /** @internal Presumably the handle of the queued resize callback — confirm against implementation. */
    private static _resizeId;
    /** @internal Backing store for the `resizeTo` target. */
    private static _resizeTo;
    /** @internal */
    private static _cancelResize;
    /**
     * Initialize the plugin with scope of application instance
     * @private
     * @param {object} [options] - See application options
     */
    static init(options: ResizePluginOptions): void;
    /**
     * Clean up the resize handling, scoped to application
     * @private
     */
    static destroy(): void;
}
/**
 * Application options for the {@link TickerPlugin}.
 * These options control the animation loop and update cycle of your PixiJS application.
 * @example
 * ```ts
 * import { Application } from 'pixi.js';
 *
 * // Basic setup with default options
 * const app = new Application();
 * await app.init({
 *     autoStart: true, // Start animation loop automatically
 *     sharedTicker: false // Use dedicated ticker instance
 * });
 *
 * // Advanced setup with shared ticker
 * const app2 = new Application();
 * await app2.init({
 *     autoStart: false, // Don't start automatically
 *     sharedTicker: true // Use global shared ticker
 * });
 *
 * // Start animation when ready
 * app2.start();
 * ```
 * @remarks
 * The ticker is the heart of your application's animation system. It:
 * - Manages the render loop
 * - Provides accurate timing information
 * - Handles frame-based updates
 * - Supports priority-based execution order
 * @see {@link Ticker} For detailed ticker functionality
 * @see {@link UPDATE_PRIORITY} For update priority constants
 * @category app
 * @standard
 */
export interface TickerPluginOptions {
    /**
     * Controls whether the animation loop starts automatically after initialization.
     * > [!IMPORTANT]
     * > Setting this to `false` does NOT stop the shared ticker even if `sharedTicker` is `true`.
     * > You must stop the shared ticker manually if needed.
     * @example
     * ```ts
     * // Auto-start (default behavior)
     * await app.init({ autoStart: true });
     *
     * // Manual start
     * await app.init({ autoStart: false });
     * app.start(); // Start when ready
     * ```
     * @default true
     */
    autoStart?: boolean;
    /**
     * Controls whether to use the shared global ticker or create a new instance.
     *
     * The shared ticker is useful when you have multiple instances that should sync their updates.
     * However, it has some limitations regarding update order control.
     *
     * Update Order:
     * 1. System ticker (always runs first)
     * 2. Shared ticker (if enabled)
     * 3. App ticker (if using own ticker)
     * @example
     * ```ts
     * // Use shared ticker (global instance)
     * await app.init({ sharedTicker: true });
     *
     * // Use dedicated ticker (default)
     * await app.init({ sharedTicker: false });
     *
     * // Access ticker properties
     * console.log(app.ticker.FPS); // Current FPS
     * console.log(app.ticker.deltaMS); // MS since last update
     * ```
     * @default false
     */
    sharedTicker?: boolean;
}
/**
 * Middleware for Application's {@link Ticker} functionality. This plugin manages the
 * animation loop and update cycle of your PixiJS application.
 *
 * Adds the following features to {@link Application}:
 * - `ticker`: Access to the application's ticker
 * - `start`: Start the animation loop
 * - `stop`: Stop the animation loop
 * @example
 * ```ts
 * import { Application, TickerPlugin, extensions } from 'pixi.js';
 *
 * // Create application
 * const app = new Application();
 *
 * // Example 1: Basic ticker usage (default autoStart)
 * await app.init({ autoStart: true }); // Starts ticker automatically
 *
 * // Example 2: Manual ticker control
 * await app.init({ autoStart: false }); // Don't start automatically
 * app.start(); // Start manually
 * app.stop(); // Stop manually
 *
 * // Example 3: Add custom update logic
 * app.ticker.add((ticker) => {
 *     // Run every frame, delta is the time since last update
 *     sprite.rotation += 0.1 * ticker.deltaTime;
 * });
 *
 * // Example 4: Control update priority
 * import { UPDATE_PRIORITY } from 'pixi.js';
 *
 * app.ticker.add(
 *     (ticker) => {
 *         // Run before normal priority updates
 *     },
 *     null,
 *     UPDATE_PRIORITY.HIGH
 * );
 *
 * // Example 5: One-time update
 * app.ticker.addOnce(() => {
 *     console.log('Runs next frame only');
 * });
 * ```
 * @see {@link Ticker} For detailed ticker functionality
 * @see {@link UPDATE_PRIORITY} For priority constants
 * @category app
 * @standard
 */
export declare class TickerPlugin {
    /** @ignore */
    static extension: ExtensionMetadata;
    /** @internal */
    static start: () => void;
    /** @internal */
    static stop: () => void;
    /** @internal */
    private static _ticker;
    /** @internal */
    static ticker: Ticker;
    /**
     * Initialize the plugin with scope of application instance
     * @private
     * @param {object} [options] - See application options
     */
    static init(options?: PixiMixins.ApplicationOptions): void;
    /**
     * Clean up the ticker, scoped to application.
     * @private
     */
    static destroy(): void;
}
declare global {
    namespace PixiMixins {
        // Extend the Application interface with resize and ticker functionalities
        interface Application {
            /**
             * Element to automatically resize the renderer to.
             * @example
             * ```ts
             * const app = new Application();
             * await app.init({
             *     resizeTo: window, // Resize to the entire window
             *     // or
             *     resizeTo: document.querySelector('#game-container'), // Resize to a specific element
             *     // or
             *     resizeTo: null, // Disable auto-resize
             * });
             * ```
             * @default null
             */
            resizeTo: Window | HTMLElement;
            /**
             * Resize the renderer to match the current `resizeTo` target.
             * > [!IMPORTANT]
             * > You do not need to call this method manually in most cases.
             * > A `resize` event will be dispatched automatically when the `resizeTo` element changes size.
             * @remarks
             * - Automatically resizes the renderer to match the size of the `resizeTo` element
             * - If `resizeTo` is `null`, auto-resizing is disabled
             * - If `resizeTo` is a `Window`, it resizes to the full window size
             * - If `resizeTo` is an `HTMLElement`, it resizes to the element's bounding client rectangle
             * @example
             * ```ts
             * const app = new Application();
             * await app.init({
             *     resizeTo: window, // Resize to the entire window
             *     // or
             *     resizeTo: document.querySelector('#game-container'), // Resize to a specific element
             *     // or
             *     resizeTo: null, // Disable auto-resize
             * });
             *
             * // Manually trigger a resize
             * app.resize();
             * ```
             * @default null
             */
            resize(): void;
            /**
             * Queue a resize operation for the next animation frame. This method is throttled
             * and optimized for frequent calls.
             * > [!IMPORTANT]
             * > You do not need to call this method manually in most cases.
             * > A `resize` event will be dispatched automatically when the `resizeTo` element changes size.
             * @remarks
             * - Safe to call multiple times per frame
             * - Only one resize will occur on next frame
             * - Cancels any previously queued resize
             * @example
             * ```ts
             * app.queueResize(); // Queue for next frame
             * ```
             */
            queueResize(): void;
            /**
             * Cancel any pending resize operation that was queued with `queueResize()`.
             * @remarks
             * - Clears the resize operation queued for next frame
             * @example
             * ```ts
             * // Queue a resize
             * app.queueResize();
             *
             * // Cancel if needed
             * app.cancelResize();
             * ```
             */
            cancelResize(): void;
            /**
             * The application's ticker instance that manages the update/render loop.
             * @example
             * ```ts
             * // Basic animation
             * app.ticker.add((ticker) => {
             *     sprite.rotation += 0.1 * ticker.deltaTime;
             * });
             *
             * // Control update priority
             * app.ticker.add(
             *     (ticker) => {
             *         // Physics update (runs first)
             *     },
             *     undefined,
             *     UPDATE_PRIORITY.HIGH
             * );
             *
             * // One-time update
             * app.ticker.addOnce(() => {
             *     console.log('Runs next frame only');
             * });
             *
             * // Access timing info
             * console.log(app.ticker.FPS); // Current FPS
             * console.log(app.ticker.deltaTime); // Scaled time delta
             * console.log(app.ticker.deltaMS); // MS since last update
             * ```
             * @see {@link Ticker} For detailed ticker functionality
             * @see {@link UPDATE_PRIORITY} For priority constants
             */
            ticker: Ticker;
            /**
             * Stops the render/update loop.
             * @example
             * ```ts
             * // Stop the application
             * app.stop();
             * // ... custom update logic ...
             * app.render(); // Manual render
             * ```
             */
            stop(): void;
            /**
             * Starts the render/update loop.
             * @example
             * ```ts
             * // Initialize without auto-start
             * await app.init({ autoStart: false });
             *
             * // Start when ready
             * app.start();
             * ```
             */
            start(): void;
        }
        // Combine ResizePluginOptions and TickerPluginOptions into ApplicationOptions
        interface ApplicationOptions extends ResizePluginOptions, TickerPluginOptions {
        }
    }
}
declare global {
    namespace PixiMixins {
        // Intentionally empty: a declaration-merging target augmented elsewhere.
        // eslint-disable-next-line @typescript-eslint/no-empty-object-type
        interface AssetsPreferences {
        }
    }
}
/**
 * The CullingMixin interface provides properties and methods for managing culling behavior
 * of a display object. Culling is the process of determining whether an object should be rendered
 * based on its visibility within the current view or frame.
 *
 * Key Features:
 * - Custom culling areas for better performance
 * - Per-object culling control
 * - Child culling management
 * @example
 * ```ts
 * // Enable culling for a container
 * const container = new Container();
 * container.cullable = true;
 *
 * // Set custom cull area for better performance
 * container.cullArea = new Rectangle(0, 0, 800, 600);
 *
 * // Disable child culling for static scenes
 * container.cullableChildren = false;
 * ```
 * @category scene
 * @standard
 */
export interface CullingMixinConstructor {
    /**
     * Custom shape used for culling calculations instead of object bounds.
     * Defined in local space coordinates relative to the object.
     * > [!NOTE]
     * > Setting this to a custom Rectangle allows you to define a specific area for culling,
     * > which can improve performance by avoiding expensive bounds calculations.
     * @example
     * ```ts
     * const container = new Container();
     *
     * // Define custom culling boundary
     * container.cullArea = new Rectangle(0, 0, 800, 600);
     *
     * // Reset to use object bounds
     * container.cullArea = null;
     * ```
     * @remarks
     * - Improves performance by avoiding bounds calculations
     * - Useful for containers with many children
     * - Set to null to use object bounds
     * @default null
     */
    cullArea: Rectangle;
    /**
     * Controls whether this object should be culled when out of view.
     * When true, the object will not be rendered if its bounds are outside the visible area.
     * @example
     * ```ts
     * const sprite = new Sprite(texture);
     *
     * // Enable culling
     * sprite.cullable = true;
     *
     * // Force object to always render
     * sprite.cullable = false;
     * ```
     * @remarks
     * - Does not affect transform updates
     * - Applies to this object only
     * - Children follow their own cullable setting
     * @default false
     */
    cullable: boolean;
    /**
     * Controls whether children of this container can be culled.
     * When false, skips recursive culling checks for better performance.
     * @example
     * ```ts
     * const container = new Container();
     *
     * // Enable container culling
     * container.cullable = true;
     *
     * // Disable child culling for performance
     * container.cullableChildren = false;
     *
     * // Children will always render if container is visible
     * container.addChild(sprite1, sprite2, sprite3);
     * ```
     * @remarks
     * - Improves performance for static scenes
     * - Useful when children are always within container bounds
     * - Parent culling still applies
     * @default true
     */
    cullableChildren: boolean;
}
/** @internal */
export declare const cullingMixin: CullingMixinConstructor;
/**
 * Application options for the {@link CullerPlugin}.
 * These options control how your application handles culling of display objects.
 * @example
 * ```ts
 * import { Application } from 'pixi.js';
 *
 * // Create application
 * const app = new Application();
 * await app.init({
 *     culler: {
 *         updateTransform: false // Skip updating transforms for culled objects
 *     }
 * });
 * ```
 * @category app
 * @standard
 */
export interface CullerPluginOptions {
    /**
     * Options for the culler behavior.
     * @example
     * ```ts
     * // Basic culling options
     * const app = new Application();
     * await app.init({
     *     culler: {...}
     * });
     * ```
     */
    culler?: {
        /**
         * Update the transform of culled objects.
         *
         * > [!IMPORTANT] Keeping this as `false` can improve performance by avoiding unnecessary calculations,
         * > however, the transform used for culling may not be up-to-date if the object has moved since the last render.
         * @default true
         * @example
         * ```ts
         * const app = new Application();
         * await app.init({
         *     culler: {
         *         updateTransform: false // Skip updating transforms for culled objects
         *     }
         * });
         * ```
         */
        updateTransform?: boolean;
    };
}
/**
 * An {@link Application} plugin that automatically culls (hides) display objects that are outside
 * the visible screen area. This improves performance by not rendering objects that aren't visible.
* * Key Features: * - Automatic culling based on screen boundaries * - Configurable culling areas and behavior per container * - Can improve rendering performance * @example * ```ts * import { Application, CullerPlugin, Container, Rectangle } from 'pixi.js'; * * // Register the plugin * extensions.add(CullerPlugin); * * // Create application * const app = new Application(); * await app.init({...}); * * // Create a container with culling enabled * const container = new Container(); * container.cullable = true; // Enable culling for this container * container.cullableChildren = true; // Enable culling for children (default) * app.stage.addChild(container); * * // Optional: Set custom cull area to avoid expensive bounds calculations * container.cullArea = new Rectangle(0, 0, app.screen.width, app.screen.height); * * // Add many sprites to the group * for (let j = 0; j < 100; j++) { * const sprite = Sprite.from('texture.png'); * sprite.x = Math.random() * 2000; * sprite.y = Math.random() * 2000; * * sprite.cullable = true; // Enable culling for each sprite * * // Set cullArea if needed * // sprite.cullArea = new Rectangle(0, 0, 100, 100); // Optional * * // Add to container * container.addChild(sprite); * } * ``` * @remarks * To enable culling, you must set the following properties on your containers: * - `cullable`: Set to `true` to enable culling for the container * - `cullableChildren`: Set to `true` to enable culling for children (default) * - `cullArea`: Optional custom Rectangle for culling bounds * * Performance Tips: * - Group objects that are spatially related * - Use `cullArea` for containers with many children to avoid bounds calculations * - Set `cullableChildren = false` for containers that are always fully visible * @category app * @standard * @see {@link Culler} For the underlying culling implementation * @see {@link CullingMixinConstructor} For culling properties documentation */ export declare class CullerPlugin { /** @ignore */ static extension: 
ExtensionMetadata; /** @internal */ static renderer: Renderer; /** @internal */ static stage: Container; /** @internal */ static render: () => void; private static _renderRef; /** * Initialize the plugin with scope of application instance * @private * @param {object} [options] - See application options */ static init(options?: PixiMixins.ApplicationOptions): void; /** @internal */ static destroy(): void; } declare global { namespace PixiMixins { // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface Container extends Partial { } // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface ContainerOptions extends Partial { } // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface ApplicationOptions extends Partial { } } } /** * Options for configuring a {@link DOMContainer}. * Controls how DOM elements are integrated into the PixiJS scene graph. * @example * ```ts * // Create with a custom element * const domContainer = new DOMContainer({ * element: document.createElement('input'), * anchor: { x: 0.5, y: 0.5 } // or anchor: 0.5 to center both x and y * }); * ``` * @category scene * @standard * @noInheritDoc */ export interface DOMContainerOptions extends ViewContainerOptions { /** * The DOM element to use for the container. * Can be any HTML element like div, input, textarea, etc. * * If not provided, creates a new div element. * @default document.createElement('div') */ element?: HTMLElement; /** * The anchor point of the container. * - Can be a single number to set both x and y * - Can be a point-like object with x,y coordinates * - (0,0) is top-left * - (1,1) is bottom-right * - (0.5,0.5) is center * @default 0 */ anchor?: PointData | number; } /** * The DOMContainer object is used to render DOM elements within the PixiJS scene graph. * It allows you to integrate HTML elements into your PixiJS application while maintaining * proper transform hierarchy and visibility. 
* * DOMContainer is especially useful for rendering standard DOM elements * that handle user input, such as `` or `