diff --git a/src/lib/dom.generated.d.ts b/src/lib/dom.generated.d.ts
index 03eb2a4a961..bb408a34976 100644
--- a/src/lib/dom.generated.d.ts
+++ b/src/lib/dom.generated.d.ts
@@ -183,6 +183,10 @@ interface ClientQueryOptions {
     type?: ClientTypes;
 }
 
+interface ClipboardEventInit extends EventInit {
+    clipboardData?: DataTransfer | null;
+}
+
 interface CloseEventInit extends EventInit {
     code?: number;
     reason?: string;
@@ -428,6 +432,10 @@ interface EventModifierInit extends UIEventInit {
     shiftKey?: boolean;
 }
 
+interface EventSourceInit {
+    withCredentials?: boolean;
+}
+
 interface ExceptionInformation {
     domain?: string | null;
 }
@@ -459,12 +467,16 @@ interface FocusOptions {
     preventScroll?: boolean;
 }
 
+interface FullscreenOptions {
+    navigationUI?: FullscreenNavigationUI;
+}
+
 interface GainOptions extends AudioNodeOptions {
     gain?: number;
 }
 
 interface GamepadEventInit extends EventInit {
-    gamepad?: Gamepad;
+    gamepad: Gamepad;
 }
 
 interface GetNotificationOptions {
@@ -599,15 +611,17 @@ interface MediaEncryptedEventInit extends EventInit {
 }
 
 interface MediaKeyMessageEventInit extends EventInit {
-    message?: ArrayBuffer | null;
-    messageType?: MediaKeyMessageType;
+    message: ArrayBuffer;
+    messageType: MediaKeyMessageType;
 }
 
 interface MediaKeySystemConfiguration {
     audioCapabilities?: MediaKeySystemMediaCapability[];
     distinctiveIdentifier?: MediaKeysRequirement;
     initDataTypes?: string[];
+    label?: string;
     persistentState?: MediaKeysRequirement;
+    sessionTypes?: string[];
     videoCapabilities?: MediaKeySystemMediaCapability[];
 }
 
@@ -724,6 +738,8 @@ interface MouseEventInit extends EventModifierInit {
     buttons?: number;
     clientX?: number;
     clientY?: number;
+    movementX?: number;
+    movementY?: number;
     relatedTarget?: EventTarget | null;
     screenX?: number;
     screenY?: number;
@@ -1442,6 +1458,11 @@ interface ServiceWorkerMessageEventInit extends EventInit {
     source?: ServiceWorker | MessagePort | null;
 }
 
+interface ShadowRootInit {
+    delegatesFocus?: boolean;
+    mode: ShadowRootMode;
+}
+
 interface StereoPannerOptions extends AudioNodeOptions {
     pan?: number;
 }
@@ -1609,6 +1630,7 @@ interface EventListener {
     (evt: Event): void;
 }
 
+/** The ANGLE_instanced_arrays extension is part of the WebGL API and allows to draw the same object, or groups of similar objects multiple times, if they share the same vertex data, primitive count and type. */
 interface ANGLE_instanced_arrays {
     drawArraysInstancedANGLE(mode: GLenum, first: GLint, count: GLsizei, primcount: GLsizei): void;
     drawElementsInstancedANGLE(mode: GLenum, count: GLsizei, type: GLenum, offset: GLintptr, primcount: GLsizei): void;
@@ -1616,6 +1638,7 @@ interface ANGLE_instanced_arrays {
     readonly VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE: GLenum;
 }
 
+/** The AbortController interface represents a controller object that allows you to abort one or more DOM requests as and when desired. */
 interface AbortController {
     /**
      * Returns the AbortSignal object associated with this object.
@@ -1634,16 +1657,17 @@ declare var AbortController: {
 };
 
 interface AbortSignalEventMap {
-    "abort": ProgressEvent;
+    "abort": Event;
 }
 
+/** The AbortSignal interface represents a signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
 interface AbortSignal extends EventTarget {
     /**
      * Returns true if this AbortSignal's AbortController has signaled to abort, and false
      * otherwise.
     */
     readonly aborted: boolean;
-    onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null;
+    onabort: ((this: AbortSignal, ev: Event) => any) | null;
     addEventListener<K extends keyof AbortSignalEventMap>(type: K, listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
     addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
     removeEventListener<K extends keyof AbortSignalEventMap>(type: K, listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
@@ -1688,6 +1712,7 @@ interface AesCmacParams extends Algorithm {
     length: number;
 }
 
+/** The AnalyserNode interface represents a node able to provide real-time frequency and time-domain analysis information. It is an AudioNode that passes the audio stream unchanged from the input to the output, but allows you to take the generated data, process it, and create audio visualizations. */
 interface AnalyserNode extends AudioNode {
     fftSize: number;
     readonly frequencyBinCount: number;
@@ -1756,6 +1781,7 @@ declare var AnimationEffect: {
     new(): AnimationEffect;
 };
 
+/** The AnimationEvent interface represents events providing information related to animations. */
 interface AnimationEvent extends Event {
     readonly animationName: string;
     readonly elapsedTime: number;
@@ -1845,6 +1871,7 @@ declare var ApplicationCache: {
     readonly UPDATEREADY: number;
 };
 
+/** This type represents a DOM element's attribute as an object. In most DOM methods, you will probably directly retrieve the attribute as a string (e.g., Element.getAttribute(), but certain functions (e.g., Element.getAttributeNode()) or means of iterating give Attr types. */
 interface Attr extends Node {
     readonly localName: string;
     readonly name: string;
@@ -1860,6 +1887,7 @@ declare var Attr: {
     new(): Attr;
 };
 
+/** Objects of these types are designed to hold small audio snippets, typically less than 45 s. For longer sounds, objects implementing the MediaElementAudioSourceNode are more suitable. The buffer contains data in the following format: non-interleaved IEEE754 32-bit linear PCM with a nominal range between -1 and +1, that is, 32bits floating point buffer, with each samples between -1.0 and 1.0. If the AudioBuffer has multiple channels, they are stored in separate buffer. */
 interface AudioBuffer {
     readonly duration: number;
     readonly length: number;
@@ -1875,6 +1903,7 @@ declare var AudioBuffer: {
     new(options: AudioBufferOptions): AudioBuffer;
 };
 
+/** The AudioBufferSourceNode interface is an AudioScheduledSourceNode which represents an audio source consisting of in-memory audio data, stored in an AudioBuffer. It's especially useful for playing back audio which has particularly stringent timing accuracy requirements, such as for sounds that must match a specific rhythm and can be kept in memory rather than being played from disk or the network. */
 interface AudioBufferSourceNode extends AudioScheduledSourceNode {
     buffer: AudioBuffer | null;
     readonly detune: AudioParam;
@@ -1894,6 +1923,7 @@ declare var AudioBufferSourceNode: {
     new(context: BaseAudioContext, options?: AudioBufferSourceOptions): AudioBufferSourceNode;
 };
 
+/** The AudioContext interface represents an audio-processing graph built from audio modules linked together, each represented by an AudioNode. */
 interface AudioContext extends BaseAudioContext {
     readonly baseLatency: number;
     readonly outputLatency: number;
@@ -1915,6 +1945,7 @@ declare var AudioContext: {
     new(contextOptions?: AudioContextOptions): AudioContext;
 };
 
+/** AudioDestinationNode has no output (as it is the output, no more AudioNode can be linked after it in the audio graph) and one input. The number of channels in the input must be between 0 and the maxChannelCount value or an exception is raised. */
 interface AudioDestinationNode extends AudioNode {
     readonly maxChannelCount: number;
 }
@@ -1924,6 +1955,7 @@ declare var AudioDestinationNode: {
     new(): AudioDestinationNode;
 };
 
+/** The AudioListener interface represents the position and orientation of the unique person listening to the audio scene, and is used in audio spatialization. All PannerNodes spatialize in relation to the AudioListener stored in the BaseAudioContext.listener attribute. */
 interface AudioListener {
     readonly forwardX: AudioParam;
     readonly forwardY: AudioParam;
@@ -1945,6 +1977,7 @@ declare var AudioListener: {
     new(): AudioListener;
 };
 
+/** The AudioNode interface is a generic interface for representing an audio processing module. Examples include: */
 interface AudioNode extends EventTarget {
     channelCount: number;
     channelCountMode: ChannelCountMode;
@@ -1968,6 +2001,7 @@ declare var AudioNode: {
     new(): AudioNode;
 };
 
+/** The Web Audio API's AudioParam interface represents an audio-related parameter, usually a parameter of an AudioNode (such as GainNode.gain). */
 interface AudioParam {
     automationRate: AutomationRate;
     readonly defaultValue: number;
@@ -1997,6 +2031,7 @@ declare var AudioParamMap: {
     new(): AudioParamMap;
 };
 
+/** The Web Audio API AudioProcessingEvent represents events that occur when a ScriptProcessorNode input buffer is ready to be processed. */
 interface AudioProcessingEvent extends Event {
     readonly inputBuffer: AudioBuffer;
     readonly outputBuffer: AudioBuffer;
@@ -2027,13 +2062,14 @@ declare var AudioScheduledSourceNode: {
     new(): AudioScheduledSourceNode;
 };
 
+/** The AudioTrack interface represents a single audio track from one of the HTML media elements,
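
Not part of the generated diff above: a minimal TypeScript sketch of how a few of the typings touched in this patch surface in user code — the "abort" listener on AbortSignal now receives a plain Event rather than a ProgressEvent, ShadowRootInit requires mode, MouseEventInit accepts movementX/movementY, and EventSourceInit carries withCredentials. The URL, element, and variable names are illustrative assumptions only.

```ts
// Illustrative usage against the updated typings; names and URLs are hypothetical.
const controller = new AbortController();

// "abort" is typed as Event (previously ProgressEvent).
controller.signal.addEventListener("abort", (ev: Event) => {
    console.log("request aborted:", ev.type);
});

fetch("https://example.com/data.json", { signal: controller.signal })
    .catch(() => { /* rejected when aborted or on network failure */ });
controller.abort();

// ShadowRootInit: `mode` is required, `delegatesFocus` remains optional.
const host = document.createElement("div");
const shadow = host.attachShadow({ mode: "open", delegatesFocus: false });
shadow.innerHTML = "<p>shadow content</p>";

// MouseEventInit now accepts movementX / movementY.
const move = new MouseEvent("mousemove", { movementX: 4, movementY: 2 });

// EventSourceInit: optional withCredentials flag on the EventSource constructor.
const source = new EventSource("/events", { withCredentials: true });
```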