// videoTexture.ts
  1. import { Observable } from "../../Misc/observable";
  2. import { Tools } from "../../Misc/tools";
  3. import { Logger } from "../../Misc/logger";
  4. import { Nullable } from "../../types";
  5. import { Scene } from "../../scene";
  6. import { Engine } from "../../Engines/engine";
  7. import { Texture } from "../../Materials/Textures/texture";
  8. import "../../Engines/Extensions/engine.videoTexture";
/**
 * Settings for finer control over video usage.
 */
export interface VideoTextureSettings {
    /**
     * Applies `autoplay` to video, if specified
     */
    autoPlay?: boolean;
    /**
     * Applies `loop` to video, if specified
     */
    loop?: boolean;
    /**
     * Automatically updates internal texture from video at every frame in the render loop
     */
    autoUpdateTexture: boolean;
    /**
     * Image src displayed during the video loading or until the user interacts with the video.
     */
    poster?: string;
}
  30. /**
  31. * If you want to display a video in your scene, this is the special texture for that.
  32. * This special texture works similar to other textures, with the exception of a few parameters.
  33. * @see https://doc.babylonjs.com/how_to/video_texture
  34. */
  35. export class VideoTexture extends Texture {
  36. /**
  37. * Tells whether textures will be updated automatically or user is required to call `updateTexture` manually
  38. */
  39. public readonly autoUpdateTexture: boolean;
  40. /**
  41. * The video instance used by the texture internally
  42. */
  43. public readonly video: HTMLVideoElement;
  44. private _onUserActionRequestedObservable: Nullable<Observable<Texture>> = null;
  45. /**
  46. * Event triggerd when a dom action is required by the user to play the video.
  47. * This happens due to recent changes in browser policies preventing video to auto start.
  48. */
  49. public get onUserActionRequestedObservable(): Observable<Texture> {
  50. if (!this._onUserActionRequestedObservable) {
  51. this._onUserActionRequestedObservable = new Observable<Texture>();
  52. }
  53. return this._onUserActionRequestedObservable;
  54. }
  55. private _generateMipMaps: boolean;
  56. private _engine: Engine;
  57. private _stillImageCaptured = false;
  58. private _displayingPosterTexture = false;
  59. private _settings: VideoTextureSettings;
  60. private _createInternalTextureOnEvent: string;
  61. private _frameId = -1;
  62. /**
  63. * Creates a video texture.
  64. * If you want to display a video in your scene, this is the special texture for that.
  65. * This special texture works similar to other textures, with the exception of a few parameters.
  66. * @see https://doc.babylonjs.com/how_to/video_texture
  67. * @param name optional name, will detect from video source, if not defined
  68. * @param src can be used to provide an url, array of urls or an already setup HTML video element.
  69. * @param scene is obviously the current scene.
  70. * @param generateMipMaps can be used to turn on mipmaps (Can be expensive for videoTextures because they are often updated).
  71. * @param invertY is false by default but can be used to invert video on Y axis
  72. * @param samplingMode controls the sampling method and is set to TRILINEAR_SAMPLINGMODE by default
  73. * @param settings allows finer control over video usage
  74. */
  75. constructor(
  76. name: Nullable<string>,
  77. src: string | string[] | HTMLVideoElement,
  78. scene: Nullable<Scene>,
  79. generateMipMaps = false,
  80. invertY = false,
  81. samplingMode: number = Texture.TRILINEAR_SAMPLINGMODE,
  82. settings: VideoTextureSettings = {
  83. autoPlay: true,
  84. loop: true,
  85. autoUpdateTexture: true,
  86. }
  87. ) {
  88. super(null, scene, !generateMipMaps, invertY);
  89. this._engine = this.getScene()!.getEngine();
  90. this._generateMipMaps = generateMipMaps;
  91. this._initialSamplingMode = samplingMode;
  92. this.autoUpdateTexture = settings.autoUpdateTexture;
  93. this.name = name || this._getName(src);
  94. this.video = this._getVideo(src);
  95. this._settings = settings;
  96. if (settings.poster) {
  97. this.video.poster = settings.poster;
  98. }
  99. if (settings.autoPlay !== undefined) {
  100. this.video.autoplay = settings.autoPlay;
  101. }
  102. if (settings.loop !== undefined) {
  103. this.video.loop = settings.loop;
  104. }
  105. this.video.setAttribute("playsinline", "");
  106. this.video.addEventListener("paused", this._updateInternalTexture);
  107. this.video.addEventListener("seeked", this._updateInternalTexture);
  108. this.video.addEventListener("emptied", this.reset);
  109. this._createInternalTextureOnEvent = (settings.poster && !settings.autoPlay) ? "play" : "canplay";
  110. this.video.addEventListener(this._createInternalTextureOnEvent, this._createInternalTexture);
  111. const videoHasEnoughData = (this.video.readyState >= this.video.HAVE_CURRENT_DATA);
  112. if (settings.poster &&
  113. (!settings.autoPlay || !videoHasEnoughData)) {
  114. this._texture = this._engine.createTexture(settings.poster!, false, true, scene);
  115. this._displayingPosterTexture = true;
  116. }
  117. else if (videoHasEnoughData) {
  118. this._createInternalTexture();
  119. }
  120. }
  121. private _getName(src: string | string[] | HTMLVideoElement): string {
  122. if (src instanceof HTMLVideoElement) {
  123. return src.currentSrc;
  124. }
  125. if (typeof src === "object") {
  126. return src.toString();
  127. }
  128. return src;
  129. }
  130. private _getVideo(src: string | string[] | HTMLVideoElement): HTMLVideoElement {
  131. if (src instanceof HTMLVideoElement) {
  132. Tools.SetCorsBehavior(src.currentSrc, src);
  133. return src;
  134. }
  135. const video: HTMLVideoElement = document.createElement("video");
  136. if (typeof src === "string") {
  137. Tools.SetCorsBehavior(src, video);
  138. video.src = src;
  139. } else {
  140. Tools.SetCorsBehavior(src[0], video);
  141. src.forEach((url) => {
  142. const source = document.createElement("source");
  143. source.src = url;
  144. video.appendChild(source);
  145. });
  146. }
  147. return video;
  148. }
  149. private _createInternalTexture = (): void => {
  150. if (this._texture != null) {
  151. if (this._displayingPosterTexture) {
  152. this._texture.dispose();
  153. this._displayingPosterTexture = false;
  154. }
  155. else {
  156. return;
  157. }
  158. }
  159. if (!this._engine.needPOTTextures ||
  160. (Tools.IsExponentOfTwo(this.video.videoWidth) && Tools.IsExponentOfTwo(this.video.videoHeight))) {
  161. this.wrapU = Texture.WRAP_ADDRESSMODE;
  162. this.wrapV = Texture.WRAP_ADDRESSMODE;
  163. } else {
  164. this.wrapU = Texture.CLAMP_ADDRESSMODE;
  165. this.wrapV = Texture.CLAMP_ADDRESSMODE;
  166. this._generateMipMaps = false;
  167. }
  168. this._texture = this._engine.createDynamicTexture(
  169. this.video.videoWidth,
  170. this.video.videoHeight,
  171. this._generateMipMaps,
  172. this.samplingMode
  173. );
  174. if (!this.video.autoplay && !this._settings.poster) {
  175. let oldHandler = this.video.onplaying;
  176. let error = false;
  177. let oldMuted = this.video.muted;
  178. this.video.muted = true;
  179. this.video.onplaying = () => {
  180. this.video.muted = oldMuted;
  181. this.video.onplaying = oldHandler;
  182. this._texture!.isReady = true;
  183. this._updateInternalTexture();
  184. if (!error) {
  185. this.video.pause();
  186. }
  187. if (this.onLoadObservable.hasObservers()) {
  188. this.onLoadObservable.notifyObservers(this);
  189. }
  190. };
  191. var playing = this.video.play();
  192. if (playing) {
  193. playing.then(() => {
  194. // Everything is good.
  195. })
  196. .catch(() => {
  197. error = true;
  198. // On Chrome for instance, new policies might prevent playing without user interaction.
  199. if (this._onUserActionRequestedObservable && this._onUserActionRequestedObservable.hasObservers()) {
  200. this._onUserActionRequestedObservable.notifyObservers(this);
  201. }
  202. });
  203. }
  204. else {
  205. this.video.onplaying = oldHandler;
  206. this._texture.isReady = true;
  207. this._updateInternalTexture();
  208. if (this.onLoadObservable.hasObservers()) {
  209. this.onLoadObservable.notifyObservers(this);
  210. }
  211. }
  212. }
  213. else {
  214. this._texture.isReady = true;
  215. this._updateInternalTexture();
  216. if (this.onLoadObservable.hasObservers()) {
  217. this.onLoadObservable.notifyObservers(this);
  218. }
  219. }
  220. }
  221. private reset = (): void => {
  222. if (this._texture == null) {
  223. return;
  224. }
  225. if (!this._displayingPosterTexture) {
  226. this._texture.dispose();
  227. this._texture = null;
  228. }
  229. }
  230. /**
  231. * @hidden Internal method to initiate `update`.
  232. */
  233. public _rebuild(): void {
  234. this.update();
  235. }
  236. /**
  237. * Update Texture in the `auto` mode. Does not do anything if `settings.autoUpdateTexture` is false.
  238. */
  239. public update(): void {
  240. if (!this.autoUpdateTexture) {
  241. // Expecting user to call `updateTexture` manually
  242. return;
  243. }
  244. this.updateTexture(true);
  245. }
  246. /**
  247. * Update Texture in `manual` mode. Does not do anything if not visible or paused.
  248. * @param isVisible Visibility state, detected by user using `scene.getActiveMeshes()` or othervise.
  249. */
  250. public updateTexture(isVisible: boolean): void {
  251. if (!isVisible) {
  252. return;
  253. }
  254. if (this.video.paused && this._stillImageCaptured) {
  255. return;
  256. }
  257. this._stillImageCaptured = true;
  258. this._updateInternalTexture();
  259. }
  260. protected _updateInternalTexture = (): void => {
  261. if (this._texture == null || !this._texture.isReady) {
  262. return;
  263. }
  264. if (this.video.readyState < this.video.HAVE_CURRENT_DATA) {
  265. return;
  266. }
  267. if (this._displayingPosterTexture) {
  268. return;
  269. }
  270. let frameId = this.getScene()!.getFrameId();
  271. if (this._frameId === frameId) {
  272. return;
  273. }
  274. this._frameId = frameId;
  275. this._engine.updateVideoTexture(this._texture, this.video, this._invertY);
  276. }
  277. /**
  278. * Change video content. Changing video instance or setting multiple urls (as in constructor) is not supported.
  279. * @param url New url.
  280. */
  281. public updateURL(url: string): void {
  282. this.video.src = url;
  283. }
  284. /**
  285. * Dispose the texture and release its associated resources.
  286. */
  287. public dispose(): void {
  288. super.dispose();
  289. if (this._onUserActionRequestedObservable) {
  290. this._onUserActionRequestedObservable.clear();
  291. this._onUserActionRequestedObservable = null;
  292. }
  293. this.video.removeEventListener(this._createInternalTextureOnEvent, this._createInternalTexture);
  294. this.video.removeEventListener("paused", this._updateInternalTexture);
  295. this.video.removeEventListener("seeked", this._updateInternalTexture);
  296. this.video.removeEventListener("emptied", this.reset);
  297. this.video.pause();
  298. }
  299. /**
  300. * Creates a video texture straight from a stream.
  301. * @param scene Define the scene the texture should be created in
  302. * @param stream Define the stream the texture should be created from
  303. * @returns The created video texture as a promise
  304. */
  305. public static CreateFromStreamAsync(scene: Scene, stream: MediaStream): Promise<VideoTexture> {
  306. var video = document.createElement("video");
  307. video.setAttribute('autoplay', '');
  308. video.setAttribute('muted', 'true');
  309. video.setAttribute('playsinline', '');
  310. video.muted = true;
  311. if (video.mozSrcObject !== undefined) {
  312. // hack for Firefox < 19
  313. video.mozSrcObject = stream;
  314. } else {
  315. if (typeof video.srcObject == "object") {
  316. video.srcObject = stream;
  317. } else {
  318. window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
  319. video.src = (window.URL && window.URL.createObjectURL(stream));
  320. }
  321. }
  322. return new Promise<VideoTexture>((resolve) => {
  323. let onPlaying = () => {
  324. resolve(new VideoTexture("video", video, scene, true, true));
  325. video.removeEventListener("playing", onPlaying);
  326. };
  327. video.addEventListener("playing", onPlaying);
  328. video.play();
  329. });
  330. }
  331. /**
  332. * Creates a video texture straight from your WebCam video feed.
  333. * @param scene Define the scene the texture should be created in
  334. * @param constraints Define the constraints to use to create the web cam feed from WebRTC
  335. * @param audioConstaints Define the audio constraints to use to create the web cam feed from WebRTC
  336. * @returns The created video texture as a promise
  337. */
  338. public static CreateFromWebCamAsync(
  339. scene: Scene,
  340. constraints: {
  341. minWidth: number;
  342. maxWidth: number;
  343. minHeight: number;
  344. maxHeight: number;
  345. deviceId: string;
  346. } & MediaTrackConstraints,
  347. audioConstaints: boolean | MediaTrackConstraints = false
  348. ): Promise<VideoTexture> {
  349. var constraintsDeviceId;
  350. if (constraints && constraints.deviceId) {
  351. constraintsDeviceId = {
  352. exact: constraints.deviceId,
  353. };
  354. }
  355. if (navigator.mediaDevices) {
  356. return navigator.mediaDevices.getUserMedia({
  357. video: constraints,
  358. audio: audioConstaints
  359. })
  360. .then((stream) => {
  361. return this.CreateFromStreamAsync(scene, stream);
  362. });
  363. }
  364. else {
  365. navigator.getUserMedia =
  366. navigator.getUserMedia ||
  367. navigator.webkitGetUserMedia ||
  368. navigator.mozGetUserMedia ||
  369. navigator.msGetUserMedia;
  370. if (navigator.getUserMedia) {
  371. navigator.getUserMedia(
  372. {
  373. video: {
  374. deviceId: constraintsDeviceId,
  375. width: {
  376. min: (constraints && constraints.minWidth) || 256,
  377. max: (constraints && constraints.maxWidth) || 640,
  378. },
  379. height: {
  380. min: (constraints && constraints.minHeight) || 256,
  381. max: (constraints && constraints.maxHeight) || 480,
  382. },
  383. },
  384. audio: audioConstaints
  385. },
  386. (stream: any) => {
  387. return this.CreateFromStreamAsync(scene, stream);
  388. },
  389. function(e: MediaStreamError) {
  390. Logger.Error(e.name);
  391. }
  392. );
  393. }
  394. }
  395. return Promise.reject("No support for userMedia on this device");
  396. }
  397. /**
  398. * Creates a video texture straight from your WebCam video feed.
  399. * @param scene Define the scene the texture should be created in
  400. * @param onReady Define a callback to triggered once the texture will be ready
  401. * @param constraints Define the constraints to use to create the web cam feed from WebRTC
  402. * @param audioConstaints Define the audio constraints to use to create the web cam feed from WebRTC
  403. */
  404. public static CreateFromWebCam(
  405. scene: Scene,
  406. onReady: (videoTexture: VideoTexture) => void,
  407. constraints: {
  408. minWidth: number;
  409. maxWidth: number;
  410. minHeight: number;
  411. maxHeight: number;
  412. deviceId: string;
  413. } & MediaTrackConstraints,
  414. audioConstaints: boolean | MediaTrackConstraints = false
  415. ): void {
  416. this.CreateFromWebCamAsync(scene, constraints, audioConstaints)
  417. .then(function(videoTexture) {
  418. if (onReady) {
  419. onReady(videoTexture);
  420. }
  421. })
  422. .catch(function(err) {
  423. Logger.Error(err.name);
  424. });
  425. }
  426. }