// tone.js
/*
The MIT License (MIT)
Copyright (c) 2014-2020 Nikolai Suslov and the Krestianstvo.org project contributors. (https://github.com/NikolaySuslov/livecodingspace/blob/master/LICENSE.md)

This driver includes the port and some code parts from the "Croquet synced video demo" for implementing Player elements syncing within LiveCoding.space applications and LCS Reflector / Luminary.

Croquet synced video demo License
Copyright 2020 Croquet Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// VWF & Tone driver
import { Fabric } from '/core/vwf/fabric.js';
//import * as Tone from '/drivers/view/tonejs/dist/Tone.js';
  20. class ToneViewDriver extends Fabric {
  21. constructor(module) {
  22. console.log("ToneViewDriver constructor");
  23. super(module, 'View');
  24. }
  25. factory() {
  26. let _self_ = this;
  27. return this.load(this.module,
  28. {
  29. initialize: function () {
  30. let self = this;
  31. this.fabric = _self_;
  32. this.nodes = {};
  33. this.toneStarted = false;
  34. function toneStart() {
  35. if (!self.toneStarted) {
  36. Tone.start().then(r => {
  37. let toneTransport = Object.values(self.state.nodes).filter(el => el.extendsID == "proxy/tonejs/transport.vwf")[0];
  38. if (toneTransport) {
  39. _self_.applyPlayState(toneTransport.ID);
  40. }
  41. console.log("context started");
  42. self.toneStarted = true;
  43. });
  44. //document.body.removeEventListener("click", toneStart, false);
  45. }
  46. }
  47. document.body.addEventListener("click", toneStart, false);
  48. //window._Tone = Tone.default;
  49. },
  50. createdNode: function (nodeID, childID, childExtendsID, childImplementsIDs,
  51. childSource, childType, childIndex, childName, callback /* ( ready ) */) {
  52. let self = this;
  53. var node = this.state.nodes[childID];
  54. // If the "nodes" object does not have this object in it, it must not be one that
  55. // this driver cares about
  56. if (!node) {
  57. return;
  58. }
  59. this.nodes[childID] = {
  60. id: childID,
  61. extends: childExtendsID,
  62. parent: nodeID,
  63. toneObj: node.toneObj
  64. };
  65. if (this.nodes[childID].extends == "proxy/tonejs/transport.vwf") {
  66. this.nodes[childID].playbackBoost = 0;
  67. //_self_.applyPlayState(nodeId);
  68. this.nodes[childID].lastTimingCheck = vwf.time() * 1000;
  69. }
  70. },
  71. firedEvent: function (nodeID, eventName, eventParameters) {
  72. //let self = this;
  73. },
  74. initializedProperty: function (nodeId, propertyName, propertyValue) {
  75. return this.satProperty(nodeId, propertyName, propertyValue);
  76. },
  77. satProperty: function (nodeId, propertyName, propertyValue) {
  78. let self = this;
  79. var node = this.state.nodes[nodeId];
  80. const viewNode = this.nodes[nodeId];
  81. if (!(node && node.toneObj)) {
  82. return;
  83. }
  84. // if(propertyName == "state"){
  85. // //let toneState = node.toneObj.state;
  86. // if(propertyValue == "started"){
  87. // node.toneObj.start()
  88. // } else if (!propertyValue || propertyValue== "stopped"){
  89. // node.toneObj.stop()
  90. // } else if (propertyValue == "paused"){
  91. // node.toneObj.pause()
  92. // }
  93. // }
  94. if (viewNode.extends == "proxy/tonejs/player.vwf") {
  95. if (propertyName == "url") {
  96. node.toneObj.load(propertyValue).then(r => { //buffer.load for GrainPLayer
  97. console.log('LOADED: ', node);
  98. if (node.toneObj.startTime && node.toneObj.state == "stopped") {
  99. node.toneObj.sync().start(node.toneObj.startTime);
  100. }
  101. })
  102. }
  103. if (propertyName == "startTime") {
  104. if (node.toneObj.state == "stopped") {
  105. if (node.toneObj.loaded == true)
  106. node.toneObj.sync().start(propertyValue);
  107. }
  108. }
  109. }
  110. if (propertyName == "startOffset" || propertyName == "pausedTime" || propertyName == "isPlaying") {
  111. if (!viewNode.latestPlayState) {
  112. viewNode.latestPlayState = {
  113. "startOffset": null,
  114. "pausedTime": null,
  115. "isPlaying": false
  116. }
  117. }
  118. viewNode.latestPlayState[propertyName] = propertyValue;
  119. if (propertyName == "isPlaying") {
  120. viewNode.isPlaying = propertyValue;
  121. _self_.applyPlayState(nodeId);
  122. }
  123. }
  124. },
  125. /*
  126. * Receives incoming messages
  127. */
  128. calledMethod: function (nodeID, methodName, methodParameters, methodValue) {
  129. let self = this;
  130. let node = this.state.nodes[nodeID];
  131. const viewNode = this.nodes[nodeID];
  132. // If the "nodes" object does not have this object in it, it must not be one that
  133. // this driver cares about
  134. if (!node) {
  135. return;
  136. }
  137. if (methodName == "syncTransportState") {
  138. _self_.applyPlayState(nodeID);
  139. }
  140. if (methodName == "setTransportState") {
  141. if (!viewNode.latestPlayState)
  142. viewNode.latestPlayState = {}
  143. // "isPlaying",
  144. // "startOffset",
  145. // "pausedTime"
  146. viewNode.latestPlayState["isPlaying"] = methodParameters[0];
  147. viewNode.latestPlayState["startOffset"] = methodParameters[1];
  148. viewNode.latestPlayState["pausedTime"] = methodParameters[2];
  149. _self_.applyPlayState(nodeID);
  150. }
  151. if (methodName == "toggleTransport") {
  152. const obj = node.toneObj;
  153. const wantsToPlay = !viewNode.latestPlayState.isPlaying; // toggle
  154. //viewNode.isPlaying = wantsToPlay;
  155. if (!wantsToPlay) {
  156. viewNode.isPlaying = false;
  157. _self_.pause(undefined, obj);
  158. } // immediately!
  159. const objTime = obj.seconds; //obj.position;
  160. const sessionTime = vwf.time() * 1000; // the session time corresponding to the video time
  161. const startOffset = wantsToPlay ? sessionTime - 1000 * objTime : null;
  162. const pausedTime = wantsToPlay ? 0 : objTime;
  163. vwf_view.kernel.callMethod(nodeID, "setTransportState", [wantsToPlay, startOffset, pausedTime]);
  164. }
  165. if (self.state.isPlayerDefinition(node.prototypes)) {
  166. if (methodName == "syncStart") {
  167. if (node.toneObj.state == "stopped") {
  168. if (methodParameters[0] == "now") {
  169. node.toneObj.sync().start(Tone.Transport.seconds);
  170. } else {
  171. node.toneObj.sync().start(methodParameters[0]);
  172. }
  173. }
  174. }
  175. if (methodName == "start") {
  176. node.toneObj.start();
  177. }
  178. if (methodName == "stop") {
  179. if (node.toneObj.state == "started")
  180. node.toneObj.stop();
  181. }
  182. if (methodName == "syncStop") {
  183. if (node.toneObj.state == "started")
  184. node.toneObj.sync().stop();
  185. }
  186. // if (methodName == "pause") {
  187. // node.toneObj.pause();
  188. // }
  189. }
  190. if (self.state.isTransportDefinition(node.prototypes)) {
  191. if (methodName == "start") {
  192. node.toneObj.start();
  193. }
  194. if (methodName == "stop") {
  195. if (node.toneObj.state == "started")
  196. node.toneObj.stop();
  197. }
  198. if (methodName == "pause") {
  199. node.toneObj.pause();
  200. }
  201. }
  202. if (methodName == "sync") {
  203. if (node.toneObj) {
  204. node.toneObj.sync();
  205. }
  206. }
  207. if (methodName == "scheduleRepeat") {
  208. Tone.Transport.scheduleRepeat((time) => {
  209. // use the callback time to schedule events
  210. //node.toneObj.start(time).stop(time + 0.2);
  211. node.toneObj.triggerAttackRelease("C4", "32n", time);
  212. }, "8n");
  213. }
  214. if (methodName == "triggerAttackRelease") {
  215. if (node.toneObj) {
  216. const now = methodParameters[2] ? methodParameters[2] :
  217. (node.toneObj._synced ? Tone.Transport.seconds : Tone.now());
  218. let notes = methodParameters[0];
  219. // let notes = methodParameters[0].map(el=>{
  220. // return Tone.Frequency(el).toNote();
  221. // })
  222. if (self.state.isMembraneSynthDefinition(node.prototypes)) {
  223. node.toneObj.triggerAttackRelease(notes[0], methodParameters[1][0], now);
  224. } else if (self.state.isNoiseSynthDefinition(node.prototypes)) {
  225. node.toneObj.triggerAttackRelease("16n", now)
  226. }
  227. else {
  228. node.toneObj.triggerAttackRelease(notes, methodParameters[1], now, methodParameters[3])
  229. }
  230. }
  231. }
  232. if (methodName == "triggerAttack") {
  233. if (node.toneObj) {
  234. const now = Tone.now()
  235. node.toneObj.triggerAttack(methodParameters[0], now, methodParameters[1])
  236. }
  237. }
  238. if (methodName == "triggerRelease") {
  239. if (node.toneObj) {
  240. node.toneObj.triggerRelease(methodParameters[0], "+0.1")
  241. }
  242. }
  243. }
  244. });
  245. }
  246. checkPlayStatusForTransportNode(nodeID) {
  247. const now = vwf.time() * 1000;
  248. let self = this.instance;
  249. let viewNode = self.nodes[nodeID];
  250. let node = self.state.nodes[nodeID];
  251. // let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  252. // let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  253. // if(notloaded.length > 0) return
  254. // let syncedPlayers = playerNodes.filter(el=>(el.toneObj._synced == true));
  255. let video = node.toneObj;
  256. let duration = video.duration; //(video.loopEnd - video.loopStart); // //
  257. let currentTime = video.seconds;
  258. //if (this.videoView) {
  259. // this.adjustPlaybar();
  260. const lastTimingCheck = viewNode.lastTimingCheck || 0;
  261. // check video timing every 0.5s
  262. if (viewNode.isPlaying && (now - lastTimingCheck >= 500)) {
  263. viewNode.lastTimingCheck = now;
  264. const expectedTime = this.wrappedTime(this.calculateVideoTime(nodeID), false, duration);
  265. //const videoTime = video.seconds;
  266. const videoDiff = currentTime - expectedTime;
  267. const videoDiffMS = videoDiff * 1000; // +ve means *ahead* of where it should be
  268. if (videoDiff < duration / 2) { // otherwise presumably measured across a loop restart; just ignore.
  269. if (viewNode.jumpIfNeeded) { //this.jumpIfNeeded
  270. viewNode.jumpIfNeeded = false;
  271. // if there's a difference greater than 500ms, try to jump the video to the right place
  272. if (Math.abs(videoDiffMS) > 500) {
  273. console.log(`jumping video by ${-Math.round(videoDiffMS)}ms`);
  274. video.pause();
  275. video.seconds = this.wrappedTime(currentTime - videoDiff, true, duration); //+ 0.1
  276. video.start();
  277. // 0.1 to counteract the delay that the jump itself tends to introduce; true to ensure we're not jumping beyond the last video frame
  278. }
  279. } else {
  280. // every 3s, check video lag/advance, and set the playback rate accordingly.
  281. // current adjustment settings:
  282. // > 150ms off: set playback 3% faster/slower than normal
  283. // > 50ms: 1% faster/slower
  284. // < 25ms: normal (i.e., hysteresis between 50ms and 25ms in the same sense)
  285. //////
  286. // const lastRateAdjust = viewNode.lastRateAdjust || 0;
  287. // if (now - lastRateAdjust >= 3000) {
  288. // //console.log(`${Math.round(videoDiff*1000)}ms`);
  289. // const oldBoostPercent = viewNode.playbackBoost;
  290. // const diffAbs = Math.abs(videoDiffMS), diffSign = Math.sign(videoDiffMS);
  291. // const desiredBoostPercent = -diffSign * (diffAbs > 150 ? 3 : (diffAbs > 50 ? 1 : 0));
  292. // if (desiredBoostPercent !== oldBoostPercent) {
  293. // // apply hysteresis on the switch to boost=0.
  294. // // for example, if old boost was +ve (because video was lagging),
  295. // // and videoDiff is -ve (i.e., it's still lagging),
  296. // // and the magnitude (of the lag) is greater than 25ms,
  297. // // don't remove the boost yet.
  298. // const hysteresisBlock = desiredBoostPercent === 0 && Math.sign(oldBoostPercent) === -diffSign && diffAbs >= 25;
  299. // if (!hysteresisBlock) {
  300. // viewNode.playbackBoost = desiredBoostPercent;
  301. // const playbackRate = 1 + viewNode.playbackBoost * 0.01;
  302. // console.log(`video playback rate: ${playbackRate}`);
  303. // //video.bpm.value = playbackRate*video.initbpm;
  304. // // video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  305. // // if(syncedPlayers.length > 0){
  306. // // syncedPlayers.map(el=>{
  307. // // el.toneObj.playbackRate = playbackRate;
  308. // // console.log("change playbackRate for ", el.ID, playbackRate);
  309. // // })
  310. // // }
  311. // //player.seek(progress * this.player.buffer.duration)
  312. // //video.playbackRate = playbackRate;
  313. // }
  314. // }
  315. // viewNode.lastRateAdjust = now;
  316. // }
  317. }
  318. }
  319. }
  320. // }
  321. }
  322. wrappedTime(videoTime, guarded, duration) {
  323. if (duration) {
  324. while (videoTime > duration) videoTime -= duration; // assume it's looping, with no gap between plays
  325. if (guarded) videoTime = Math.min(duration, videoTime); // the video element freaks out on being told to seek very close to the end //- 0.1
  326. }
  327. return videoTime;
  328. }
  329. calculateVideoTime(nodeID) {
  330. let self = this.instance;
  331. const node = self.state.nodes[nodeID];
  332. const viewNode = self.nodes[nodeID];
  333. //const video = node.obj.fill.image;
  334. // const { isPlaying, startOffset } = this.latestPlayState;
  335. //if (!isPlaying) debugger;
  336. const sessionNow = vwf.time() * 1000;
  337. let t = (sessionNow - viewNode.latestPlayState.startOffset) / 1000;
  338. //console.log('Time: ', t)
  339. return t;
  340. }
  341. pause(videoTime, video) {
  342. //this.isPlaying = this.isBlocked = false; // might not be blocked next time.
  343. this.setStatic(videoTime, video);
  344. }
  345. setStatic(videoTime, video) {
  346. let duration = video.duration;// video.loopEnd - video.loopStart; //video.duration; //
  347. if (videoTime !== undefined) {
  348. if (video.state == "started") {
  349. video.pause(); // no return value; synchronous, instantaneous?
  350. }
  351. video.seconds = this.wrappedTime(videoTime, true, duration); // true => guarded from values too near the end
  352. }
  353. }
  354. async play(video, videoTime) {
  355. let self = this.instance;
  356. let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  357. let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  358. if(notloaded.length > 0) return
  359. // return true if video play started successfully
  360. let duration = video.duration; //video.loopEnd - video.loopStart; //
  361. //this.isPlaying = true; // even if it turns out to be blocked by the browser
  362. // following guidelines from https://developer.mozilla.org/docs/Web/API/HTMLMediaElement/play
  363. // if(video.state == "stopped" || video.state == "paused") {
  364. let position = this.wrappedTime(videoTime, true, duration);
  365. if (video.state == "started") {
  366. video.pause();
  367. video.seconds = this.wrappedTime(videoTime, true, duration);
  368. video.start();
  369. }
  370. if (video.state == "stopped" || video.state == "paused") {
  371. video.start();
  372. }
  373. //video.toggle();
  374. //}
  375. // try {
  376. // await video.start(); // will throw exception if blocked
  377. // //this.isBlocked = false;
  378. // } catch (err) {
  379. // console.warn("video play blocked");
  380. // // this.isBlocked = this.isPlaying; // just in case isPlaying was set false while we were trying
  381. // }
  382. return true //!this.isBlocked;
  383. }
  384. applyPlayState(nodeID) {
  385. let self = this.instance;
  386. const node = self.state.nodes[nodeID];
  387. const viewNode = self.nodes[nodeID];
  388. const video = node.toneObj;
  389. if (viewNode.latestPlayState) {
  390. if (!viewNode.latestPlayState.isPlaying) {
  391. // this.iconVisible('play', true);
  392. // this.iconVisible('enableSound', false);
  393. this.pause(viewNode.latestPlayState.pausedTime, video);
  394. } else {
  395. //video.playbackRate = 1 + viewNode.playbackBoost * 0.01;
  396. //let playerNodes = Object.values(self.state.nodes).filter(el=>(el.extendsID == "proxy/tonejs/player.vwf"));
  397. //let notloaded = playerNodes.filter(el=>(el.toneObj.loaded == false));
  398. //if(notloaded.length > 0) return
  399. // let syncedPlayers = playerNodes.filter(el=>(el.toneObj._synced == true));
  400. // let playbackRate = 1 + viewNode.playbackBoost * 0.01;
  401. //video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  402. //video.bpm.value = playbackRate*video.initbpm;
  403. // video.bpm.rampTo(playbackRate*video.initbpm, 0.1);
  404. // if(syncedPlayers.length > 0){
  405. // syncedPlayers.map(el=>{
  406. // el.toneObj.playbackRate = playbackRate;
  407. // console.log("change playbackRate for ", el.ID, playbackRate);
  408. // })
  409. // }
  410. viewNode.lastRateAdjust = vwf.time() * 1000; // make sure we don't adjust rate until playback has settled in, and after any emergency jump we decide to do
  411. viewNode.jumpIfNeeded = false;
  412. // if the video is blocked from playing, enter a stepping mode in which we move the video forward with successive pause() calls
  413. viewNode.isPlaying = true;
  414. this.play(video, this.calculateVideoTime(nodeID)).then(playStarted => { // + 0.1
  415. if (playStarted) {
  416. // setTimeout(function () {
  417. viewNode.jumpIfNeeded = true;
  418. // }, 250);
  419. }
  420. //this.iconVisible('enableSound', !playStarted || videoElem.muted);
  421. //if (playStarted) this.future(250).triggerJumpCheck(); // leave it a little time to stabilise
  422. })
  423. }
  424. }
  425. }
  426. }
  427. export { ToneViewDriver as default }