/*
The MIT License (MIT)
Copyright (c) 2014-2020 Nikolai Suslov and the Krestianstvo.org project contributors. (https://github.com/NikolaySuslov/livecodingspace/blob/master/LICENSE.md)

This driver includes the port and some code parts from the "Croquet synced video demo" for implementing video element syncing within LiveCoding.space applications and the LCS Reflector / Luminary.

Croquet synced video demo License
Copyright 2020 Croquet Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// TWO JS view driver

import { Fabric } from '/core/vwf/fabric.js';

class TwoView extends Fabric {

  constructor(module) {
    console.log("TwoView constructor");
    super(module, 'View');
  }
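  // factory() hands the VWF view-driver callbacks below (initialize, createdNode,
  // satProperty, calledMethod, ticked, ...) to the Fabric module loader.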
  factory() {
    let _self_ = this;
    return this.load(this.module,
      {
        // == Module Definition ====================================================================

        initialize: function (options) {

          let self = this;
          this.fabric = _self_;

          this.nodes = {};
          this.overChilds = [];
          this.state.appInitialized = false;

          if (options === undefined) { options = {}; }

          if (typeof options == "object") {
            this.rootSelector = options["application-root"];
          }
          else {
            this.rootSelector = options;
          }

          this.lastStatusCheck = vwf.time() * 1000 + 500;
          this.clicked = false;

          this.isIOS = [
            'iPad Simulator',
            'iPhone Simulator',
            'iPod Simulator',
            'iPad',
            'iPhone',
            'iPod'
          ].includes(navigator.platform)
            // iPad on iOS 13 detection
            || (navigator.userAgent.includes("Mac") && "ontouchend" in document);

          // On the first user gesture, leave video stepping mode and resume normal playback
          function videostepping() {
            let twodriver = vwf.views["/drivers/view/two"];
            if (!twodriver.clicked)
              twodriver.clicked = true;
            let videos = Object.values(twodriver.state.nodes).filter(el => el.fillType == "video");
            videos.forEach(el => {
              let viewNode = twodriver.nodes[el.ID];
              if (viewNode.isStepping) {
                console.log(`exiting step mode`);
                el.muted = false;
                viewNode.isStepping = false;
                _self_.applyPlayState(el.ID);
                //document.body.removeEventListener("click", videostepping, false);
                //document.body.removeEventListener("touchstart", videostepping, false);
                //return;
              }
            })
          }

          document.body.addEventListener("click", videostepping, false);
          document.body.addEventListener("touchstart", videostepping, false);
        },

        createdNode: function (nodeID, childID, childExtendsID, childImplementsIDs,
          childSource, childType, childIndex, childName, callback /* ( ready ) */) {

          let self = this;
          var node = this.state.nodes[childID];

          // If the "nodes" object does not have this object in it, it must not be one that
          // this driver cares about
          if (!node) {
            return;
          }

          if (this.state.scenes[childID]) {

            let scene = this.state.scenes[childID];
            let space = scene.obj;

            _self_.resizeScene(childID);

            space.bind('update', function (frameCount) {
              // This code is called every time two.update() is called.
              // Effectively 60 times per second.
              _self_.update(frameCount);
            }).play(); // Finally, start the animation loop

            //TODO: FIX
            let avatarName = 'avatar-' + self.kernel.moniker();
            console.log("creating avatar...");
            var newNode = {
              "id": avatarName,
              "uri": avatarName,
              "extends": "proxy/two/player.vwf",
              "properties": {}
            }

            if (!self.state.nodes[avatarName]) {
              vwf_view.kernel.createChild(childID, avatarName, newNode);
              vwf_view.kernel.callMethod(avatarName, "createPlayerBody", []);
            }
          }

          if (this.state.nodes[childID] && this.state.nodes[childID].obj) {

            this.nodes[childID] = {
              id: childID,
              extends: childExtendsID,
              liveBindings: {}
              // lastTransformStep: 0,
              // lastAnimationStep: 0
            };

            if (this.nodes[childID].extends == "proxy/two/scene.vwf") {

              this.nodes[childID].mouse = new Two.Vector();

              window.addEventListener('mousemove', function (e) {
                e.preventDefault();
                let scene = self.state.nodes[childID].obj.renderer.scene;
                let x = e.offsetX / scene.scale;
                let y = e.offsetY / scene.scale;
                self.nodes[childID].mouse.set(x, y);
                //vwf_view.kernel.callMethod(el.nodeID, "mousedownEvent", []);
              });

              window.addEventListener('touchstart', function (e) {
                //e.preventDefault();
                let touch = e.changedTouches[0];
                let scene = self.state.nodes[childID].obj.renderer.scene;
                const { x, y, width, height } = e.target.getBoundingClientRect();
                self.nodes[childID].mouse.set(touch.pageX / scene.scale, touch.pageY / scene.scale);
                _self_.updateAvatarPosition();
                _self_.mouseDown(node, touch.pageX, touch.pageY);
              }, { passive: false });

              window.addEventListener('touchend', function (e) {
                //e.preventDefault();
                let touch = e.changedTouches[0];
                //let scene = self.state.nodes[childID].obj.renderer.scene;
                let x = touch.pageX;
                let y = touch.pageY;
                // _self_.updateAvatarPosition();
                _self_.mouseUp(node, x, y);
                //return false;
              }, { passive: false });

              window.addEventListener('touchmove', function (e) {
                e.preventDefault();
                let touch = e.changedTouches[0];
                let scene = self.state.nodes[childID].obj.renderer.scene;
                let x = touch.pageX / scene.scale;
                let y = touch.pageY / scene.scale;
                self.nodes[childID].mouse.set(x, y);
              }, { passive: false });

              //resize event
              window.addEventListener("resize", function (event) {
                _self_.resizeScene(childID);
              });

              //node.scene.obj.update();

              node.obj.renderer.domElement.addEventListener('mousedown', function (e) {
                var x = e.clientX;
                var y = e.clientY;
                //let nodes = self.state.nodes;
                _self_.mouseDown(node, x, y);
              }, false);

              node.obj.renderer.domElement.addEventListener('mouseup', function (e) {
                var x = e.clientX;
                var y = e.clientY;
                // let nodes = self.state.nodes;
                _self_.mouseUp(node, x, y);
              }, false);
            }

            // IF RENDERER SVG
            // if(node.prototypes.includes("proxy/two/path.vwf")) {
            //   let elm = node.obj;
            //   node.scene.obj.update();
            //   elm._renderer.elem.addEventListener('click', function() {
            //     vwf_view.kernel.callMethod(childID, "svgClickEvent", []);
            //   }, false);
            // }
          }
        },

        executed: function (nodeID, scriptText, scriptType) {
          let self = this;
          let node = this.state.nodes[nodeID];
          if (!(node)) {
            return;
          }
        },

        initializedNode: function (nodeID, childID) {
          let self = this;
          var node = this.state.nodes[childID];
          if (!node) {
            return;
          }
        },

        createdProperty: function (nodeId, propertyName, propertyValue) {
          return this.satProperty(nodeId, propertyName, propertyValue);
        },

        initializedProperty: function (nodeId, propertyName, propertyValue) {
          return this.satProperty(nodeId, propertyName, propertyValue);
        },

        gotProperty: function (nodeId, propertyName, propertyValue) {
          var node = this.state.nodes[nodeId];
          if (!(node && node.aframeObj)) {
            return;
          }
        },

        satProperty: function (nodeId, propertyName, propertyValue) {

          let self = this;
          var node = this.state.nodes[nodeId];
          const viewNode = this.nodes[nodeId];

          if (!(node && node.obj)) {
            return;
          }

          if (propertyName == "startOffset" || propertyName == "pausedTime" || propertyName == "isPlaying") {
            if (!viewNode.latestPlayState) {
              viewNode.latestPlayState = {
                "startOffset": null,
                "pausedTime": null,
                "isPlaying": false
              }
            }
            viewNode.latestPlayState[propertyName] = propertyValue;
            if (propertyName == "isPlaying") {
              viewNode.isPlaying = propertyValue;
              _self_.applyPlayState(nodeId);
            }
          }

          if (propertyName == 'mask') {
            let mask = Object.values(this.state.nodes).filter(el => (el.name == propertyValue))[0];
            if (mask) {
              node.obj.mask = mask.obj;
              self.kernel.setProperty(mask.ID, "maskedNode", node.name)
            }
          }

          if (propertyName == "bodyNode") {
            let bodyNode = Object.values(this.state.nodes).filter(el => (el.name == propertyValue) && (el["ID"].includes("avatar") !== true))[0];
            if (bodyNode)
              node.bodyNode = bodyNode.ID;
          }

          if (propertyName == "motionData") {
            let url = propertyValue;
            if (url) {
              fetch(url)
                .then(response => response.json())
                .then(data => {
                  //console.log(data);
                  node.motionData = data;
                });
            }
          }

          if (propertyName == "bodyTrack") {
            viewNode.bodyTrack = propertyValue;
          }

          if (propertyName == "fill") {
            if (node.obj.fill instanceof Two.Texture) {
              if (node.obj.fill.image.nodeName == 'VIDEO') {
                const video = node.obj.fill.image;
                viewNode.playbackBoost = 0;
                //_self_.applyPlayState(nodeId);
                viewNode.lastTimingCheck = vwf.time() * 1000 + 500;
                viewNode.isPlaying = false;
                viewNode.isBlocked = false;
                //video.requestVideoFrameCallback(_self_.animate);
                // video.addEventListener('timeupdate', (event) => {
                //   if (viewNode.bodyTrack) {
                //     // console.log('time: ', video.currentTime);
                //   }
                // });
              }
            }
          }
        },

        deletedNode: function (childID) {
          delete this.nodes[childID];
        },

        firedEvent: function (nodeID, eventName, eventParameters) {

          let self = this;
          var node = this.state.nodes[nodeID];
          if (!(node)) {
            return;
          }

          var clientThatSatProperty = self.kernel.client();
          var me = self.kernel.moniker();
          var avatarName = 'avatar-' + self.kernel.moniker();

          // if (eventName == "clickEvent" ||
          //   eventName == 'mousedownEvent' ||
          //   eventName == 'mouseupEvent') {
          //   if (clientThatSatProperty == me) {
          //     let methodName = eventName + 'Method';
          //     self.kernel.callMethod(nodeID, methodName, eventParameters);
          //     if (eventName == "clickEvent") {
          //       let mode = vwf.getProperty(avatarName, 'selectMode');
          //       if (mode) {
          //         console.log("allow to click!!!")
          //         vwf_view.kernel.setProperty(avatarName, 'selectMode', false);
          //         let editorDriver = vwf.views["/drivers/view/editor"];
          //         if (editorDriver) {
          //           let selectSwitch = document.querySelector('#selectNodeSwitch');
          //           // const selectSwitchComp = new mdc.iconButton.MDCIconButtonToggle(selectSwitch); //new mdc.select.MDCIconToggle
          //           selectSwitch._comp.on = false;
          //           let currentNodeDIV = document.querySelector('#currentNode');
          //           if (currentNodeDIV) currentNodeDIV._setNode(nodeID);
          //         }
          //       }
          //     }
          //   }
          // }

          // if (eventName == "clickEvent") {
          //   if (self.kernel.moniker() == eventParameters[0]) {
          //     let avatar = self.nodes[avatarName];
          //     let mode = vwf.getProperty(avatarName, 'selectMode');
          //     vwf_view.kernel.callMethod(nodeID, "clickEventMethod", [])
          //   }
          // }
        },

        ticked: function (vwfTime) {
          let self = this;
          _self_.updateAvatarPosition();
          // _self_.updateFilters();
          //lerpTick ();
        },

        calledMethod: function (nodeID, methodName, methodParameters, methodValue) {

          let self = this;
          var node = this.state.nodes[nodeID];
          const viewNode = this.nodes[nodeID];

          if (!(node && node.obj)) {
            return;
          }

          // if(methodName == "setMask"){
          //   let mask = this.state.nodes[methodParameters[0]];
          //   node.obj.mask = mask.obj;
          // }

          if (methodName == "setScale") {
            if (!node.obj.matrix.manual)
              node.obj.matrix.manual = true;
            node.obj.matrix.scale(methodParameters[0], methodParameters[1])
          }

          if (methodName == "unmute") {
            node.obj.fill.image.muted = false;
          }

          if (methodName == "syncVideoState") {
            _self_.applyPlayState(nodeID);
          }

          if (methodName == "setVideoState") {
            if (!viewNode.latestPlayState)
              viewNode.latestPlayState = {}
            // "isPlaying",
            // "startOffset",
            // "pausedTime"
            viewNode.latestPlayState["isPlaying"] = methodParameters[0];
            viewNode.latestPlayState["startOffset"] = methodParameters[1];
            viewNode.latestPlayState["pausedTime"] = methodParameters[2];
            _self_.applyPlayState(nodeID);
          }

          if (methodName == "playVideo") {
            if (node.obj.fill instanceof Two.Texture) {
              if (node.obj.fill.image.nodeName == 'VIDEO') {

                const video = node.obj.fill.image;
                //video.currentTime = _self_.wrappedTime(videoTime, true);
                //video.play();

                if (!viewNode.latestPlayState) {
                  viewNode.latestPlayState = {
                    "isPlaying": false,
                    "startOffset": null,
                    "pausedTime": 0,
                  }
                }

                const wantsToPlay = !viewNode.latestPlayState.isPlaying; // toggle
                //viewNode.isPlaying = wantsToPlay;
                if (!wantsToPlay) {
                  viewNode.isPlaying = false;
                  _self_.pause(undefined, video, nodeID);
                } // immediately!

                const videoTime = video.currentTime;
                const sessionTime = vwf.time() * 1000; // the session time corresponding to the video time
                const startOffset = wantsToPlay ? sessionTime - 1000 * videoTime : null;
                const pausedTime = wantsToPlay ? 0 : videoTime;
                vwf_view.kernel.callMethod(nodeID, "setVideoState", [wantsToPlay, startOffset, pausedTime]);
              }
            }
          }

          if (methodName == "viewTroughFilter") {
            var clientThatSatProperty = self.kernel.client();
            var me = self.kernel.moniker();
            //let avatarID = methodParameters[0];
            //&& avatarID.includes(me)
            if (clientThatSatProperty == me) {
              console.log("MY VIEW!!!");
              let maskedNode = self.state.nodes[methodParameters[0]];
              if (maskedNode) {
                //maskedNode.obj.visible = methodParameters[1]
                if (maskedNode.obj.fill.image) {
                  if (!self.isIOS) { //TODO: IOS
                    if (self.clicked) {
                      //maskedNode.obj.fill.image.muted = methodParameters[1];
                      if (methodParameters[1]) {
                        maskedNode.obj.fill.image.volume = 0
                      } else {
                        if (maskedNode.obj.fill.image.muted)
                          maskedNode.obj.fill.image.muted = false
                        maskedNode.obj.fill.image.volume = 1
                      }
                    }
                  }
                }
              }
            }
          }

          if (methodName == "checkOver") {
            var clientThatSatProperty = self.kernel.client();
            var me = self.kernel.moniker();
            // If the transform property was initially updated by this view....
            if (clientThatSatProperty == me) {
              let scene = node.scene.obj.scene;
              let scale = scene.scale;
              var x = methodParameters[0] * scale;
              var y = methodParameters[1] * scale;
              let allChilds = _self_.getOverlayChilds(node.scene.obj.scene, x, y).map(x => {
                return x.nodeID;
              });
              if (JSON.stringify(allChilds) !== JSON.stringify(self.overChilds)) {
                //console.log(allChilds);
                let end = self.overChilds.filter(x => !allChilds.includes(x));
                //console.log("END OVERLAY..", end);
                end.map(x => {
                  vwf_view.kernel.callMethod(x, "overendEvent", ['avatar-', me]);
                })
                let start = allChilds.filter(x => !self.overChilds.includes(x));
                //console.log("START OVERLAY..", start);
                start.map(x => {
                  vwf_view.kernel.callMethod(x, "overstartEvent", ['avatar-', me]);
                })
                self.overChilds = allChilds;
              }
              //let still = self.overChilds.filter(x => allChilds.includes(x));
              //if (still.length > 0)
              //   console.log("STILL OVERLAY..", still)
            }
          }
        }
      });
  }

  ///
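  // mouseUp / mouseDown translate DOM pointer coordinates into replicated VWF
  // calls (mouseupEvent / mousedownEvent plus drag start/end checks) on every
  // non-avatar node whose bounding rect contains the point.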
  mouseUp(node, x, y) {
    let self = this.instance;
    let avatarID = "avatar-" + vwf.moniker();
    vwf_view.kernel.setProperty(avatarID, "mouseevent", "mouseup");
    let allChilds = this.getOverlayChilds(node.obj.scene, x, y).map(x => {
      return x.nodeID;
    });
    allChilds.forEach(el => {
      vwf_view.kernel.callMethod(el, "mouseupEvent", []);
      vwf_view.kernel.callMethod(el, "checkForDragEnd", [avatarID]);
    })
  }

  mouseDown(node, x, y) {
    let self = this.instance;
    let avatarID = "avatar-" + vwf.moniker();
    vwf_view.kernel.setProperty(avatarID, "mouseevent", "mousedown");
    let allChilds = this.getOverlayChilds(node.obj.scene, x, y).map(x => {
      return x.nodeID;
    });
    allChilds.forEach(el => {
      vwf_view.kernel.callMethod(el, "mousedownEvent", []);
      vwf_view.kernel.callMethod(el, "checkForDragStart", [avatarID])
    })
  }

  // Scale the Two.js scene to fit the renderer element, relative to a 1280x720 design size
  resizeScene(childID) {
    let self = this.instance;
    let scene = self.state.nodes[childID].obj;
    let renderer = scene.renderer
    let elem = renderer.domElement;
    let scale = Math.min(
      elem.offsetWidth / 1280,
      elem.offsetHeight / 720
    );
    // var scale = //elem.offsetHeight / 1000;
    renderer.scene.scale = scale;
    renderer.setSize(elem.offsetWidth, elem.offsetHeight);
  }

  ///VIDEO & SOUND SYNC//////

  // doSomethingWithTheFrame = (now, metadata) => {
  //   // Do something with the frame.
  //   console.log(now, metadata);
  //   // Re-register the callback to be notified about the next frame.
  //   video.requestVideoFrameCallback(doSomethingWithTheFrame);
  // };
  // // Initially register the callback to be notified about the first frame.
  // video.requestVideoFrameCallback(doSomethingWithTheFrame);

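  // Overview of the sync scheme below (a descriptive summary of this code, ported
  // from the Croquet synced video demo): each video node carries a replicated
  // latestPlayState { isPlaying, startOffset, pausedTime }. startOffset is the
  // session time (ms) at which a playing video would have been at time 0, so the
  // expected position is (vwf.time() * 1000 - startOffset) / 1000. Each view
  // nudges its local <video> element toward that position by adjusting
  // playbackRate a few percent, or by jumping when the drift exceeds 500ms; if
  // autoplay is blocked, the view retries muted and then falls back to a
  // "stepping" mode driven by setStatic() until the first user gesture.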
  update(frameCount) {
    let self = this.instance;
    const now = vwf.time() * 1000;
    this.animate(frameCount);
    if (now - self.lastStatusCheck > 100) {
      self.lastStatusCheck = now;
      this.checkPlayStatus();
    }
  }

  applyPlayState(nodeID) {
    let self = this.instance;
    const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];
    const video = node.obj.fill.image;

    if (!viewNode.latestPlayState.isPlaying) {
      // this.iconVisible('play', true);
      // this.iconVisible('enableSound', false);
      this.pause(viewNode.latestPlayState.pausedTime, video, nodeID);
    } else {
      video.playbackRate = 1 + viewNode.playbackBoost * 0.01;
      viewNode.lastRateAdjust = vwf.time() * 1000; // make sure we don't adjust rate until playback has settled in, and after any emergency jump we decide to do
      viewNode.jumpIfNeeded = false;
      // if the video is blocked from playing, enter a stepping mode in which we move the video forward with successive pause() calls
      viewNode.isPlaying = true;
      this.play(video, this.calculateVideoTime(nodeID) + 0.1, nodeID).then(playStarted => {
        if (playStarted) {
          setTimeout(function () {
            viewNode.jumpIfNeeded = true;
          }, 250);
        }
        else if (!video.muted) {
          console.log(`trying with mute`);
          video.muted = true;
          this.applyPlayState(nodeID);
        }
        else {
          console.log(`reverting to stepped display`);
          viewNode.isStepping = true;
          this.stepWhileBlocked(nodeID);
        }
        //this.iconVisible('enableSound', !playStarted || videoElem.muted);
        //if (playStarted) this.future(250).triggerJumpCheck(); // leave it a little time to stabilise
      })
    }
  }

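  // animate() drives the optional pose overlay: when a video node has a linked
  // bodyNode with loaded motionData (JSON keyed by video timestamp), the frame
  // within ~0.02s of video.currentTime repositions the "joint" children and the
  // "topline" / "rhand" / "lhand" / "rhand2" / "lhand2" / "faceline" polylines.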
  animate(frameCount, metadata) {
    let driver = vwf.views["/drivers/view/two"];
    if (driver) {
      let self = driver.instance;
      let videos = Object.values(self.state.nodes).filter(el => el.fillType == "video");
      videos.forEach(el => {
        let viewNode = self.nodes[el.ID];
        let node = self.state.nodes[el.ID];
        let video = node.obj.fill.image;
        let currentTime = video.currentTime;
        //console.log(video.currentTime);
        if (node.bodyNode) {
          let bodyNode = self.state.nodes[node.bodyNode];
          //if(metadata.presentedFrames % 5 == 0){
          if (viewNode.bodyTrack && bodyNode.motionData) {
            //console.log(currentTime);
            let bodyFrameNumber = Object.keys(bodyNode.motionData).filter(n => (Math.abs(Number.parseFloat(n) - currentTime) < 0.02))[0];
            let bodyFrame = bodyNode.motionData[bodyFrameNumber];
            let mul = 950;
            if (bodyFrame) {
              bodyNode.obj.children.map((e, i) => {
                if (e.nodeID.includes("joint")) {
                  e.translation.x = bodyFrame[i].x * mul;
                  e.translation.y = bodyFrame[i].y * mul;
                }
              })

              //16-14-12-11-13-15 - topline
              //12-24-23-11 - bottomline
              //18,16,20 - rh // 16,22
              //17,15,19 - lh // 15,21
              //let faceArr = [10,8,6,4,1,3,7,9]

              let ta = [16, 14, 12, 11, 13, 15];
              let topline = bodyNode.obj.children.filter(e => (e.nodeID.includes("topline")))[0];
              if (topline) {
                ta.map((e, i) => {
                  topline.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              let rh = [18, 16, 20];
              let rhline = bodyNode.obj.children.filter(e => (e.nodeID.includes("rhand")))[0];
              if (rhline) {
                rh.map((e, i) => {
                  rhline.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              let lh = [17, 15, 19];
              let lhline = bodyNode.obj.children.filter(e => (e.nodeID.includes("lhand")))[0];
              if (lhline) {
                lh.map((e, i) => {
                  lhline.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              let rh2 = [16, 22];
              let rhline2 = bodyNode.obj.children.filter(e => (e.nodeID.includes("rhand2")))[0];
              if (rhline2) {
                rh2.map((e, i) => {
                  rhline2.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              let lh2 = [15, 21];
              let lhline2 = bodyNode.obj.children.filter(e => (e.nodeID.includes("lhand2")))[0];
              if (lhline2) {
                lh2.map((e, i) => {
                  lhline2.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              let faceArr = [10, 8, 6, 4, 1, 3, 7, 9, 10];
              let faceline = bodyNode.obj.children.filter(e => (e.nodeID.includes("faceline")))[0];
              if (faceline) {
                faceArr.map((e, i) => {
                  faceline.vertices[i].set(bodyFrame[e].x * mul, bodyFrame[e].y * mul);
                })
              }

              // let ba = [11,23]; //[12,24,23,11];
              // let bottomline = bodyNode.obj.children.filter(e=>(e.nodeID.includes("bottomline")))[0];
              // if(bottomline){
              //   ba.map((e,i)=>{
              //     bottomline.vertices[i].set(bodyFrame[e].x*mul, bodyFrame[e].y*mul);
              //   })
              // }
            }
          }
        }
        //}
        //video.requestVideoFrameCallback(self.animate);
      })
    }
  }

  checkPlayStatus() {
    let self = this.instance;
    //let scene = self.nodes[vwf.application()];
    let videos = Object.values(self.state.nodes).filter(el => el.fillType == "video");
    videos.forEach(el => {
      this.checkPlayStatusForNode(el.ID);
    })

    let toneJSDriver = vwf.views["/drivers/view/tone"];
    if (toneJSDriver) {
      let toneTransport = Object.values(toneJSDriver.state.nodes).filter(el => el.extendsID == "proxy/tonejs/transport.vwf")[0];
      if (toneTransport) {
        toneJSDriver.fabric.checkPlayStatusForTransportNode(toneTransport.ID);
      }
    }
  }

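  // Drift handling in checkPlayStatusForNode, by magnitude of videoDiffMS
  // (positive = video is ahead of the session clock):
  //   > 500ms and jumpIfNeeded -> seek straight to the expected time (+0.1s for seek delay)
  //   > 150ms                  -> playbackRate 1.03 / 0.97
  //   > 50ms                   -> playbackRate 1.01 / 0.99
  //   < 25ms                   -> back to 1.0 (25-50ms is the hysteresis band)
  // e.g. a video lagging by 200ms (videoDiffMS = -200) gets a +3% boost.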
  checkPlayStatusForNode(nodeID) {
    let self = this.instance;
    let viewNode = self.nodes[nodeID];
    let node = self.state.nodes[nodeID];
    let video = node.obj.fill.image;

    //if (this.videoView) {
    //   this.adjustPlaybar();

    const lastTimingCheck = viewNode.lastTimingCheck || 0;
    const now = vwf.time() * 1000;
    // check video timing every 0.5s
    if (viewNode.isPlaying && !viewNode.isBlocked && (now - lastTimingCheck >= 500)) {
      viewNode.lastTimingCheck = now;
      const expectedTime = this.wrappedTime(this.calculateVideoTime(nodeID), false, video.duration);
      const videoTime = video.currentTime;
      const videoDiff = videoTime - expectedTime;
      const videoDiffMS = videoDiff * 1000; // +ve means *ahead* of where it should be

      if (videoDiff < video.duration / 2) { // otherwise presumably measured across a loop restart; just ignore.
        if (viewNode.jumpIfNeeded) { //this.jumpIfNeeded
          viewNode.jumpIfNeeded = false;
          // if there's a difference greater than 500ms, try to jump the video to the right place
          if (Math.abs(videoDiffMS) > 500) {
            console.log(`jumping video by ${-Math.round(videoDiffMS)}ms`);
            video.currentTime = this.wrappedTime(videoTime - videoDiff + 0.1, true, video.duration); // 0.1 to counteract the delay that the jump itself tends to introduce; true to ensure we're not jumping beyond the last video frame
          }
        } else {
          // every 3s, check video lag/advance, and set the playback rate accordingly.
          // current adjustment settings:
          //   > 150ms off: set playback 3% faster/slower than normal
          //   > 50ms: 1% faster/slower
          //   < 25ms: normal (i.e., hysteresis between 50ms and 25ms in the same sense)
          const lastRateAdjust = viewNode.lastRateAdjust || 0;
          if (now - lastRateAdjust >= 3000) {
            //console.log(`${Math.round(videoDiff*1000)}ms`);
            const oldBoostPercent = viewNode.playbackBoost;
            const diffAbs = Math.abs(videoDiffMS), diffSign = Math.sign(videoDiffMS);
            const desiredBoostPercent = -diffSign * (diffAbs > 150 ? 3 : (diffAbs > 50 ? 1 : 0));
            if (desiredBoostPercent !== oldBoostPercent) {
              // apply hysteresis on the switch to boost=0.
              // for example, if old boost was +ve (because video was lagging),
              // and videoDiff is -ve (i.e., it's still lagging),
              // and the magnitude (of the lag) is greater than 25ms,
              // don't remove the boost yet.
              const hysteresisBlock = desiredBoostPercent === 0 && Math.sign(oldBoostPercent) === -diffSign && diffAbs >= 25;
              if (!hysteresisBlock) {
                viewNode.playbackBoost = desiredBoostPercent;
                const playbackRate = 1 + viewNode.playbackBoost * 0.01;
                console.log(`video playback rate: ${playbackRate}`);
                video.playbackRate = playbackRate;
              }
            }
            viewNode.lastRateAdjust = now;
          }
        }
      }
    }
    // }
  }

  wrappedTime(videoTime, guarded, duration) {
    if (duration) {
      while (videoTime > duration) videoTime -= duration; // assume it's looping, with no gap between plays
      if (guarded) videoTime = Math.min(duration - 0.1, videoTime); // the video element freaks out on being told to seek very close to the end
    }
    return videoTime;
  }

  calculateVideoTime(nodeID) {
    let self = this.instance;
    const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];
    //const video = node.obj.fill.image;
    // const { isPlaying, startOffset } = this.latestPlayState;
    //if (!isPlaying) debugger;
    const sessionNow = vwf.time() * 1000;
    let t = (sessionNow - viewNode.latestPlayState.startOffset) / 1000;
    // console.log('Time: ', t)
    return t;
  }

  pause(videoTime, video, nodeID) {
    let self = this.instance;
    const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];
    viewNode.isPlaying = viewNode.isBlocked = false; // might not be blocked next time.
    this.setStatic(videoTime, video);
  }

  setStatic(videoTime, video) {
    if (video) {
      if (videoTime !== undefined) video.currentTime = this.wrappedTime(videoTime, true, video.duration); // true => guarded from values too near the end
      video.pause(); // no return value; synchronous, instantaneous?
    }
  }

  triggerJumpCheck(nodeID) {
    let self = this.instance;
    //const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];
    viewNode.jumpIfNeeded = true;
  }

  stepWhileBlocked(nodeID) {
    let _self = this;
    let self = this.instance;
    const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];
    const video = node.obj.fill.image;

    if (!viewNode.isStepping) return; // we've left stepping mode
    if (!viewNode.isBlocked) {
      viewNode.isStepping = false;
      return;
    }
    this.setStatic(this.calculateVideoTime(nodeID), video);
    setTimeout(function () {
      _self.stepWhileBlocked(nodeID)
    }, 250);
    //this.future(250).stepWhileBlocked(); // jerky, but keeping up
  }

  async play(video, videoTime, nodeID) {
    let self = this.instance;
    const node = self.state.nodes[nodeID];
    const viewNode = self.nodes[nodeID];

    // return true if video play started successfully
    const duration = video.duration;
    video.currentTime = this.wrappedTime(videoTime, true, duration);
    viewNode.isPlaying = true;
    //this.isPlaying = true; // even if it turns out to be blocked by the browser

    // following guidelines from https://developer.mozilla.org/docs/Web/API/HTMLMediaElement/play
    try {
      await video.play(); // will throw exception if blocked
      viewNode.isBlocked = false;
    } catch (err) {
      console.warn("video play blocked");
      viewNode.isBlocked = viewNode.isPlaying; // just in case isPlaying was set false while we were trying
    }
    return !viewNode.isBlocked;
  }

  /////
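  // Hit-testing and avatar tracking helpers: getOverlayChilds() recursively
  // collects non-avatar children whose bounding rect contains the given point
  // (descending into "group" nodes); updateAvatarPosition() forwards the last
  // mouse/touch position to this client's avatar via its "move" method.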
  getOverlayChilds(node, x, y) {
    //let childs = node.scene.obj.scene.children;
    var childs = [];
    node.children.forEach(el => {
      if (!el.nodeID.includes('avatar')) {
        let rect = el.getBoundingClientRect();
        if (x >= rect.left && x <= rect.right && y >= rect.top && y <= rect.bottom) {
          childs.push(el);
          if (el.nodeName == "group") {
            childs = childs.concat(this.getOverlayChilds(el, x, y));
          }
        }
      }
    })
    return childs
  }

  updateAvatarPosition() {
    let self = this.instance;
    let avatarName = 'avatar-' + self.kernel.moniker();
    var node = self.state.nodes[avatarName];
    var nodeView = self.nodes[avatarName];
    let scene = self.nodes[vwf.application()];

    if (!node) return;
    if (!node.obj) return;

    let position = scene.mouse;
    if (!nodeView.lastPosition) {
      nodeView.lastPosition = new Two.Vector(position.x, position.y);
    }
    let lastPosition = nodeView.lastPosition;

    if (position && !(position.equals(lastPosition))) {
      // self.kernel.setProperty(avatarName, "x", position.x);
      // self.kernel.setProperty(avatarName, "y", position.y);
      self.kernel.callMethod(avatarName, "move", [position.x, position.y]);
    }
    nodeView.lastPosition.set(position.x, position.y)
  }

  postLoadAction(nodeID) {
    //vwf_view.kernel.fireEvent(nodeID, "postLoadAction")
  }
}

export { TwoView as default }