// tone.js
/*
The MIT License (MIT)
Copyright (c) 2014-2020 Nikolai Suslov and the Krestianstvo.org project contributors. (https://github.com/NikolaySuslov/livecodingspace/blob/master/LICENSE.md)

This driver includes the port and some code parts from the "Croquet synced video demo" for implementing Player elements syncing within LiveCoding.space applications and LCS Reflector / Luminary.

Croquet synced video demo License
Copyright 2020 Croquet Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// VWF & Tone driver
import { Fabric } from '/core/vwf/fabric.js';
//import * as Tone from '/drivers/view/tonejs/dist/Tone.js';
  20. class ToneViewDriver extends Fabric {
  21. constructor(module) {
  22. console.log("ToneViewDriver constructor");
  23. super(module, 'View');
  24. }
  25. factory() {
  26. let _self_ = this;
  27. return this.load(this.module,
  28. {
  29. initialize: function () {
  30. let self = this;
  31. this.fabric = _self_;
  32. this.nodes = {};
  33. this.toneStarted = false;
  34. function toneStart() {
  35. if (!self.toneStarted) {
  36. let ctx = Tone.getContext();
  37. if (ctx.state == 'suspended') {
  38. Tone.getContext().resume().then(r => {
  39. console.log("context started");
  40. self.toneStarted = true;
  41. let toneTransport = Object.values(self.state.nodes).filter(el => el.extendsID == "proxy/tonejs/transport.vwf")[0];
  42. if (toneTransport) {
  43. _self_.applyPlayState(toneTransport.ID);
  44. }
  45. });
  46. }
  47. // Tone.start().then(r => {
  48. // let toneTransport = Object.values(self.state.nodes).filter(el => el.extendsID == "proxy/tonejs/transport.vwf")[0];
  49. // if (toneTransport) {
  50. // _self_.applyPlayState(toneTransport.ID);
  51. // }
  52. // console.log("context started");
  53. // self.toneStarted = true;
  54. // });
  55. document.body.removeEventListener("click", toneStart, false);
  56. }
  57. }
  58. document.body.addEventListener("click", toneStart, false);
  59. //window._Tone = Tone.default;
  60. },
  61. createdNode: function (nodeID, childID, childExtendsID, childImplementsIDs,
  62. childSource, childType, childIndex, childName, callback /* ( ready ) */) {
  63. let self = this;
  64. var node = this.state.nodes[childID];
  65. // If the "nodes" object does not have this object in it, it must not be one that
  66. // this driver cares about
  67. if (!node) {
  68. return;
  69. }
  70. this.nodes[childID] = {
  71. id: childID,
  72. extends: childExtendsID,
  73. parent: nodeID,
  74. toneObj: node.toneObj
  75. };
  76. if (this.nodes[childID].extends == "proxy/tonejs/transport.vwf") {
  77. this.nodes[childID].playbackBoost = 0;
  78. //_self_.applyPlayState(nodeId);
  79. this.nodes[childID].lastTimingCheck = vwf.time() * 1000;
  80. }
  81. },
  82. firedEvent: function (nodeID, eventName, eventParameters) {
  83. //let self = this;
  84. },
  85. initializedProperty: function (nodeId, propertyName, propertyValue) {
  86. return this.satProperty(nodeId, propertyName, propertyValue);
  87. },
  88. satProperty: function (nodeId, propertyName, propertyValue) {
  89. let self = this;
  90. var node = this.state.nodes[nodeId];
  91. const viewNode = this.nodes[nodeId];
  92. if (!(node && node.toneObj)) {
  93. return;
  94. }
  95. // if(propertyName == "state"){
  96. // //let toneState = node.toneObj.state;
  97. // if(propertyValue == "started"){
  98. // node.toneObj.start()
  99. // } else if (!propertyValue || propertyValue== "stopped"){
  100. // node.toneObj.stop()
  101. // } else if (propertyValue == "paused"){
  102. // node.toneObj.pause()
  103. // }
  104. // }
  105. if (viewNode.extends == "proxy/tonejs/player.vwf") {
  106. if (propertyName == "url") {
  107. node.toneObj.load(propertyValue).then(r => { //buffer.load for GrainPLayer
  108. console.log('LOADED: ', node);
  109. if (node.toneObj.startTime && node.toneObj.state == "stopped") {
  110. node.toneObj.sync().start(node.toneObj.startTime);
  111. }
  112. })
  113. }
  114. if (propertyName == "startTime") {
  115. if (node.toneObj.state == "stopped") {
  116. if (node.toneObj.loaded == true)
  117. node.toneObj.sync().start(propertyValue);
  118. }
  119. }
  120. }
  121. if (propertyName == "startOffset" || propertyName == "pausedTime" || propertyName == "isPlaying") {
  122. if (!viewNode.latestPlayState) {
  123. viewNode.latestPlayState = {
  124. "startOffset": null,
  125. "pausedTime": null,
  126. "isPlaying": false
  127. }
  128. }
  129. viewNode.latestPlayState[propertyName] = propertyValue;
  130. if (propertyName == "isPlaying") {
  131. viewNode.isPlaying = propertyValue;
  132. _self_.applyPlayState(nodeId);
  133. }
  134. }
  135. },
  136. /*
  137. * Receives incoming messages
  138. */
  139. calledMethod: function (nodeID, methodName, methodParameters, methodValue) {
  140. let self = this;
  141. let node = this.state.nodes[nodeID];
  142. const viewNode = this.nodes[nodeID];
  143. // If the "nodes" object does not have this object in it, it must not be one that
  144. // this driver cares about
  145. if (!node) {
  146. return;
  147. }
  148. if (methodName == "syncTransportState") {
  149. _self_.applyPlayState(nodeID);
  150. }
  151. if (methodName == "setTransportState") {
  152. if (!viewNode.latestPlayState)
  153. viewNode.latestPlayState = {}
  154. // "isPlaying",
  155. // "startOffset",
  156. // "pausedTime"
  157. viewNode.latestPlayState["isPlaying"] = methodParameters[0];
  158. viewNode.latestPlayState["startOffset"] = methodParameters[1];
  159. viewNode.latestPlayState["pausedTime"] = methodParameters[2];
  160. _self_.applyPlayState(nodeID);
  161. }
  162. if (methodName == "toggleTransport") {
  163. const obj = node.toneObj;
  164. const wantsToPlay = !viewNode.latestPlayState.isPlaying; // toggle
  165. //viewNode.isPlaying = wantsToPlay;
  166. if (!wantsToPlay) {
  167. viewNode.isPlaying = false;
  168. _self_.pause(undefined, obj);
  169. } // immediately!
  170. const objTime = obj.seconds; //obj.position;
  171. const sessionTime = vwf.time() * 1000; // the session time corresponding to the video time
  172. const startOffset = wantsToPlay ? sessionTime - 1000 * objTime : null;
  173. const pausedTime = wantsToPlay ? 0 : objTime;
  174. vwf_view.kernel.callMethod(nodeID, "setTransportState", [wantsToPlay, startOffset, pausedTime]);
  175. }
  176. if (self.state.isPlayerDefinition(node.prototypes)) {
  177. if (methodName == "syncStart") {
  178. if (node.toneObj.state == "stopped") {
  179. if (methodParameters[0] == "now") {
  180. node.toneObj.sync().start(Tone.Transport.seconds);
  181. } else {
  182. node.toneObj.sync().start(methodParameters[0]);
  183. }
  184. }
  185. }
  186. if (methodName == "start") {
  187. node.toneObj.start();
  188. }
  189. if (methodName == "stop") {
  190. if (node.toneObj.state == "started")
  191. node.toneObj.stop();
  192. }
  193. if (methodName == "syncStop") {
  194. if (node.toneObj.state == "started")
  195. node.toneObj.sync().stop();
  196. }
  197. // if (methodName == "pause") {
  198. // node.toneObj.pause();
  199. // }
  200. }
  201. if (self.state.isTransportDefinition(node.prototypes)) {
  202. if (methodName == "start") {
  203. node.toneObj.start();
  204. }
  205. if (methodName == "stop") {
  206. if (node.toneObj.state == "started")
  207. node.toneObj.stop();
  208. }
  209. if (methodName == "pause") {
  210. node.toneObj.pause();
  211. }
  212. }
  213. if (methodName == "sync") {
  214. if (node.toneObj) {
  215. node.toneObj.sync();
  216. }
  217. }
  218. if (methodName == "scheduleRepeat") {
  219. Tone.Transport.scheduleRepeat((time) => {
  220. // use the callback time to schedule events
  221. //node.toneObj.start(time).stop(time + 0.2);
  222. node.toneObj.triggerAttackRelease("C4", "32n", time);
  223. }, "8n");
  224. }
  225. if (methodName == "triggerAttackRelease") {
  226. if (node.toneObj) {
  227. const now = methodParameters[2] ? methodParameters[2] :
  228. (node.toneObj._synced ? Tone.Transport.seconds : undefined);//Tone.now());
  229. let notes = methodParameters[0];
  230. // let notes = methodParameters[0].map(el=>{
  231. // return Tone.Frequency(el).toNote();
  232. // })
  233. if (self.state.isMembraneSynthDefinition(node.prototypes)) {
  234. node.toneObj.triggerAttackRelease(notes[0], methodParameters[1][0], now);
  235. } else if (self.state.isNoiseSynthDefinition(node.prototypes)) {
  236. node.toneObj.triggerAttackRelease("16n", now)
  237. }
  238. else {
  239. node.toneObj.triggerAttackRelease(notes, methodParameters[1], now, methodParameters[3])
  240. }
  241. }
  242. }
  243. if (methodName == "triggerAttack") {
  244. if (node.toneObj) {
  245. const now = Tone.now()
  246. node.toneObj.triggerAttack(methodParameters[0], now, methodParameters[1])
  247. }
  248. }
  249. if (methodName == "triggerRelease") {
  250. if (node.toneObj) {
  251. node.toneObj.triggerRelease(methodParameters[0], "+0.1")
  252. }
  253. }
  254. }
  255. });
  256. }
  257. checkPlayStatusForTransportNode(nodeID) {
  258. const now = vwf.time() * 1000;
  259. let self = this.instance;
  260. let viewNode = self.nodes[nodeID];
  261. let node = self.state.nodes[nodeID];
  262. // let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  263. // let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  264. // if(notloaded.length > 0) return
  265. // let syncedPlayers = playerNodes.filter(el=>(el.toneObj._synced == true));
  266. let video = node.toneObj;
  267. let duration = video.duration; //(video.loopEnd - video.loopStart); // //
  268. let currentTime = video.seconds;
  269. //if (this.videoView) {
  270. // this.adjustPlaybar();
  271. const lastTimingCheck = viewNode.lastTimingCheck || 0;
  272. // check video timing every 0.5s
  273. if (viewNode.isPlaying && (now - lastTimingCheck >= 500)) {
  274. viewNode.lastTimingCheck = now;
  275. const expectedTime = this.wrappedTime(this.calculateVideoTime(nodeID), false, duration);
  276. //const videoTime = video.seconds;
  277. const videoDiff = currentTime - expectedTime;
  278. const videoDiffMS = videoDiff * 1000; // +ve means *ahead* of where it should be
  279. if (videoDiff < duration / 2) { // otherwise presumably measured across a loop restart; just ignore.
  280. if (viewNode.jumpIfNeeded) { //this.jumpIfNeeded
  281. viewNode.jumpIfNeeded = false;
  282. // if there's a difference greater than 500ms, try to jump the video to the right place
  283. if (Math.abs(videoDiffMS) > 500) {
  284. console.log(`jumping video by ${-Math.round(videoDiffMS)}ms`);
  285. video.pause();
  286. video.seconds = this.wrappedTime(currentTime - videoDiff, true, duration); //+ 0.1
  287. video.start();
  288. // 0.1 to counteract the delay that the jump itself tends to introduce; true to ensure we're not jumping beyond the last video frame
  289. }
  290. } else {
  291. // every 3s, check video lag/advance, and set the playback rate accordingly.
  292. // current adjustment settings:
  293. // > 150ms off: set playback 3% faster/slower than normal
  294. // > 50ms: 1% faster/slower
  295. // < 25ms: normal (i.e., hysteresis between 50ms and 25ms in the same sense)
  296. //////
  297. // const lastRateAdjust = viewNode.lastRateAdjust || 0;
  298. // if (now - lastRateAdjust >= 3000) {
  299. // //console.log(`${Math.round(videoDiff*1000)}ms`);
  300. // const oldBoostPercent = viewNode.playbackBoost;
  301. // const diffAbs = Math.abs(videoDiffMS), diffSign = Math.sign(videoDiffMS);
  302. // const desiredBoostPercent = -diffSign * (diffAbs > 150 ? 3 : (diffAbs > 50 ? 1 : 0));
  303. // if (desiredBoostPercent !== oldBoostPercent) {
  304. // // apply hysteresis on the switch to boost=0.
  305. // // for example, if old boost was +ve (because video was lagging),
  306. // // and videoDiff is -ve (i.e., it's still lagging),
  307. // // and the magnitude (of the lag) is greater than 25ms,
  308. // // don't remove the boost yet.
  309. // const hysteresisBlock = desiredBoostPercent === 0 && Math.sign(oldBoostPercent) === -diffSign && diffAbs >= 25;
  310. // if (!hysteresisBlock) {
  311. // viewNode.playbackBoost = desiredBoostPercent;
  312. // const playbackRate = 1 + viewNode.playbackBoost * 0.01;
  313. // console.log(`video playback rate: ${playbackRate}`);
  314. // //video.bpm.value = playbackRate*video.initbpm;
  315. // // video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  316. // // if(syncedPlayers.length > 0){
  317. // // syncedPlayers.map(el=>{
  318. // // el.toneObj.playbackRate = playbackRate;
  319. // // console.log("change playbackRate for ", el.ID, playbackRate);
  320. // // })
  321. // // }
  322. // //player.seek(progress * this.player.buffer.duration)
  323. // //video.playbackRate = playbackRate;
  324. // }
  325. // }
  326. // viewNode.lastRateAdjust = now;
  327. // }
  328. }
  329. }
  330. }
  331. // }
  332. }
  333. wrappedTime(videoTime, guarded, duration) {
  334. if (duration) {
  335. while (videoTime > duration) videoTime -= duration; // assume it's looping, with no gap between plays
  336. if (guarded) videoTime = Math.min(duration, videoTime); // the video element freaks out on being told to seek very close to the end //- 0.1
  337. }
  338. return videoTime;
  339. }
  340. calculateVideoTime(nodeID) {
  341. let self = this.instance;
  342. const node = self.state.nodes[nodeID];
  343. const viewNode = self.nodes[nodeID];
  344. //const video = node.obj.fill.image;
  345. // const { isPlaying, startOffset } = this.latestPlayState;
  346. //if (!isPlaying) debugger;
  347. const sessionNow = vwf.time() * 1000;
  348. let t = (sessionNow - viewNode.latestPlayState.startOffset) / 1000;
  349. //console.log('Time: ', t)
  350. return t;
  351. }
  352. pause(videoTime, video) {
  353. //this.isPlaying = this.isBlocked = false; // might not be blocked next time.
  354. this.setStatic(videoTime, video);
  355. }
  356. setStatic(videoTime, video) {
  357. let duration = video.duration;// video.loopEnd - video.loopStart; //video.duration; //
  358. if (videoTime !== undefined) {
  359. if (video.state == "started") {
  360. video.pause(); // no return value; synchronous, instantaneous?
  361. }
  362. video.seconds = this.wrappedTime(videoTime, true, duration); // true => guarded from values too near the end
  363. }
  364. }
  365. async play(video, videoTime) {
  366. let self = this.instance;
  367. let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  368. let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  369. if(notloaded.length > 0) return
  370. // return true if video play started successfully
  371. let duration = video.duration; //video.loopEnd - video.loopStart; //
  372. //this.isPlaying = true; // even if it turns out to be blocked by the browser
  373. // following guidelines from https://developer.mozilla.org/docs/Web/API/HTMLMediaElement/play
  374. // if(video.state == "stopped" || video.state == "paused") {
  375. let position = this.wrappedTime(videoTime, true, duration);
  376. if (video.state == "started") {
  377. video.pause();
  378. video.seconds = this.wrappedTime(videoTime, true, duration);
  379. video.start();
  380. }
  381. if (video.state == "stopped" || video.state == "paused") {
  382. video.start();
  383. }
  384. //video.toggle();
  385. //}
  386. // try {
  387. // await video.start(); // will throw exception if blocked
  388. // //this.isBlocked = false;
  389. // } catch (err) {
  390. // console.warn("video play blocked");
  391. // // this.isBlocked = this.isPlaying; // just in case isPlaying was set false while we were trying
  392. // }
  393. return true //!this.isBlocked;
  394. }
  395. applyPlayState(nodeID) {
  396. let self = this.instance;
  397. const node = self.state.nodes[nodeID];
  398. const viewNode = self.nodes[nodeID];
  399. const video = node.toneObj;
  400. if (viewNode.latestPlayState) {
  401. if (!viewNode.latestPlayState.isPlaying) {
  402. // this.iconVisible('play', true);
  403. // this.iconVisible('enableSound', false);
  404. this.pause(viewNode.latestPlayState.pausedTime, video);
  405. } else {
  406. //video.playbackRate = 1 + viewNode.playbackBoost * 0.01;
  407. //let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  408. //let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  409. //if(notloaded.length > 0) return
  410. // let syncedPlayers = playerNodes.filter(el=>(el.toneObj._synced == true));
  411. // let playbackRate = 1 + viewNode.playbackBoost * 0.01;
  412. //video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  413. //video.bpm.value = playbackRate*video.initbpm;
  414. // video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  415. // if(syncedPlayers.length > 0){
  416. // syncedPlayers.map(el=>{
  417. // el.toneObj.playbackRate = playbackRate;
  418. // console.log("change playbackRate for ", el.ID, playbackRate);
  419. // })
  420. // }
  421. viewNode.lastRateAdjust = vwf.time() * 1000; // make sure we don't adjust rate until playback has settled in, and after any emergency jump we decide to do
  422. viewNode.jumpIfNeeded = false;
  423. // if the video is blocked from playing, enter a stepping mode in which we move the video forward with successive pause() calls
  424. viewNode.isPlaying = true;
  425. this.play(video, this.calculateVideoTime(nodeID)).then(playStarted => { // + 0.1
  426. if (playStarted) {
  427. // setTimeout(function () {
  428. viewNode.jumpIfNeeded = true;
  429. // }, 250);
  430. }
  431. //this.iconVisible('enableSound', !playStarted || videoElem.muted);
  432. //if (playStarted) this.future(250).triggerJumpCheck(); // leave it a little time to stabilise
  433. })
  434. }
  435. }
  436. }
  437. }
export { ToneViewDriver as default }