plugin.webaudio.js

/*
	----------------------------------------------------------
	Web Audio API - OGG or MPEG Soundbank
	----------------------------------------------------------
	http://webaudio.github.io/web-audio-api/
	----------------------------------------------------------
*/

(function(root) { 'use strict';

	window.AudioContext && (function() {

		var audioContext = null; // new AudioContext();
		var useStreamingBuffer = false; // !!audioContext.createMediaElementSource;
		var midi = root.WebAudio = {api: 'webaudio'};
		var ctx; // audio context
		var sources = {};
		var effects = {};
		var masterVolume = 127;
		var audioBuffers = {};
		///
		midi.audioBuffers = audioBuffers;
		midi.send = function(data, delay) { };
		midi.setController = function(channelId, type, value, delay) { };
		midi.setVolume = function(channelId, volume, delay) {
			if (delay) {
				setTimeout(function() {
					masterVolume = volume;
				}, delay * 1000);
			} else {
				masterVolume = volume;
			}
		};
		midi.programChange = function(channelId, program, delay) {
			// if (delay) {
			// 	return setTimeout(function() {
			// 		var channel = root.channels[channelId];
			// 		channel.instrument = program;
			// 	}, delay);
			// } else {
				var channel = root.channels[channelId];
				channel.instrument = program;
			// }
		};
		midi.pitchBend = function(channelId, program, delay) {
			// if (delay) {
			// 	setTimeout(function() {
			// 		var channel = root.channels[channelId];
			// 		channel.pitchBend = program;
			// 	}, delay);
			// } else {
				var channel = root.channels[channelId];
				channel.pitchBend = program;
			// }
		};
		midi.noteOn = function(channelId, noteId, velocity, delay) {
			delay = delay || 0;
			/// check whether the note exists
			var channel = root.channels[channelId];
			var instrument = channel.instrument;
			var bufferId = instrument + '' + noteId;
			var buffer = audioBuffers[bufferId];
			if (!buffer) {
				// console.log(MIDI.GM.byId[instrument].id, instrument, channelId);
				return;
			}
			/// convert relative delay to absolute delay
			if (delay < ctx.currentTime) {
				delay += ctx.currentTime;
			}
			/// create audio buffer
			var source;
			if (useStreamingBuffer) {
				source = ctx.createMediaElementSource(buffer);
			} else { // XMLHTTP buffer
				source = ctx.createBufferSource();
				source.buffer = buffer;
			}
			/// add effects to buffer
			if (effects) {
				var chain = source;
				for (var key in effects) {
					chain.connect(effects[key].input);
					chain = effects[key];
				}
			}
			/// add gain + pitchShift
			var gain = (velocity / 127) * (masterVolume / 127) * 2 - 1;
			source.connect(ctx.destination);
			source.playbackRate.value = 1; // pitch shift
			source.gainNode = ctx.createGain(); // gain
			source.gainNode.connect(ctx.destination);
			source.gainNode.gain.value = Math.min(1.0, Math.max(-1.0, gain));
			source.connect(source.gainNode);
			///
			if (useStreamingBuffer) {
				if (delay) {
					return setTimeout(function() {
						buffer.currentTime = 0;
						buffer.play();
					}, delay * 1000);
				} else {
					buffer.currentTime = 0;
					buffer.play();
				}
			} else {
				source.start(delay || 0);
			}
			///
			sources[channelId + '' + noteId] = source;
			///
			return source;
		};
		midi.noteOff = function(channelId, noteId, delay) {
			delay = delay || 0;
			/// check whether the note exists
			var channel = root.channels[channelId];
			var instrument = channel.instrument;
			var bufferId = instrument + '' + noteId;
			var buffer = audioBuffers[bufferId];
			if (buffer) {
				if (delay < ctx.currentTime) {
					delay += ctx.currentTime;
				}
				///
				var source = sources[channelId + '' + noteId];
				if (source) {
					if (source.gainNode) {
						// @Miranet: 'the values of 0.2 and 0.3 could of course be used as
						// a 'release' parameter for ADSR like time settings.'
						// add { 'metadata': { release: 0.3 } } to soundfont files
						var gain = source.gainNode.gain;
						gain.linearRampToValueAtTime(gain.value, delay);
						gain.linearRampToValueAtTime(-1.0, delay + 0.3);
					}
					///
					if (useStreamingBuffer) {
						if (delay) {
							setTimeout(function() {
								buffer.pause();
							}, delay * 1000);
						} else {
							buffer.pause();
						}
					} else {
						if (source.noteOff) {
							source.noteOff(delay + 0.5);
						} else {
							source.stop(delay + 0.5);
						}
					}
					///
					delete sources[channelId + '' + noteId];
					///
					return source;
				}
			}
		};
		midi.chordOn = function(channel, chord, velocity, delay) {
			var res = {};
			for (var n = 0, note, len = chord.length; n < len; n++) {
				res[note = chord[n]] = midi.noteOn(channel, note, velocity, delay);
			}
			return res;
		};
		midi.chordOff = function(channel, chord, delay) {
			var res = {};
			for (var n = 0, note, len = chord.length; n < len; n++) {
				res[note = chord[n]] = midi.noteOff(channel, note, delay);
			}
			return res;
		};
		midi.stopAllNotes = function() {
			for (var sid in sources) {
				var delay = 0;
				if (delay < ctx.currentTime) {
					delay += ctx.currentTime;
				}
				var source = sources[sid];
				// fade out through the gain node attached in noteOn, then stop the source
				source.gainNode.gain.linearRampToValueAtTime(1, delay);
				source.gainNode.gain.linearRampToValueAtTime(0, delay + 0.3);
				if (source.noteOff) { // old api
					source.noteOff(delay + 0.3);
				} else { // new api
					source.stop(delay + 0.3);
				}
				delete sources[sid];
			}
		};
		midi.setEffects = function(list) {
			if (ctx.tunajs) {
				for (var n = 0; n < list.length; n++) {
					var data = list[n];
					var effect = new ctx.tunajs[data.type](data);
					effect.connect(ctx.destination);
					effects[data.type] = effect;
				}
			} else {
				return console.log('Effects module not installed.');
			}
		};
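		// setEffects expects a list of option objects whose `type` names a tuna.js effect
		// constructor; each object is passed straight to `new ctx.tunajs[data.type](data)`.
		// A minimal sketch -- the 'Chorus' effect and its option names come from tuna.js,
		// not from this file:
		//
		//	MIDI.WebAudio.setEffects([
		//		{type: 'Chorus', rate: 1.5, feedback: 0.2, delay: 0.0045, bypass: 0}
		//	]);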
		midi.connect = function(opts) {
			root.setDefaultPlugin(midi);
			midi.setContext(ctx || createAudioContext(), opts.onsuccess);
		};
		midi.getContext = function() {
			return ctx;
		};
		midi.setContext = function(newCtx, onload, onprogress, onerror) {
			ctx = newCtx;
			/// tuna.js effects module - https://github.com/Dinahmoe/tuna
			if (typeof Tuna !== 'undefined' && !ctx.tunajs) {
				ctx.tunajs = new Tuna(ctx);
			}
			/// loading audio files
			var urls = [];
			var notes = root.keyToNote;
			for (var key in notes) urls.push(key);
			///
			var waitForEnd = function(instrument) {
				for (var key in bufferPending) { // has pending items
					if (bufferPending[key]) return;
				}
				///
				if (onload) { // run onload once
					onload();
					onload = null;
				}
			};
			///
			var requestAudio = function(soundfont, instrumentId, index, key) {
				var url = soundfont[key];
				if (url) {
					bufferPending[instrumentId]++;
					loadAudio(url, function(buffer) {
						buffer.id = key;
						var noteId = root.keyToNote[key];
						audioBuffers[instrumentId + '' + noteId] = buffer;
						///
						if (--bufferPending[instrumentId] === 0) {
							var percent = index / 87;
							// console.log(MIDI.GM.byId[instrumentId], 'processing: ', percent);
							soundfont.isLoaded = true;
							waitForEnd(instrument);
						}
					}, function(err) {
						// console.log(err);
					});
				}
			};
			///
			var bufferPending = {};
			for (var instrument in root.Soundfont) {
				var soundfont = root.Soundfont[instrument];
				if (soundfont.isLoaded) {
					continue;
				}
				///
				var synth = root.GM.byName[instrument];
				var instrumentId = synth.number;
				///
				bufferPending[instrumentId] = 0;
				///
				for (var index = 0; index < urls.length; index++) {
					var key = urls[index];
					requestAudio(soundfont, instrumentId, index, key);
				}
			}
			///
			setTimeout(waitForEnd, 1);
		};
		/* Load audio file: streaming | base64 | arraybuffer
		---------------------------------------------------------------------- */
		function loadAudio(url, onload, onerror) {
			if (useStreamingBuffer) {
				var audio = new Audio();
				audio.src = url;
				audio.controls = false;
				audio.autoplay = false;
				audio.preload = false;
				audio.addEventListener('canplay', function() {
					onload && onload(audio);
				});
				audio.addEventListener('error', function(err) {
					onerror && onerror(err);
				});
				document.body.appendChild(audio);
			} else if (url.indexOf('data:audio') === 0) { // Base64 string
				var base64 = url.split(',')[1];
				var buffer = Base64Binary.decodeArrayBuffer(base64);
				ctx.decodeAudioData(buffer, onload, onerror);
			} else { // XMLHTTP buffer
				var request = new XMLHttpRequest();
				request.open('GET', url, true);
				request.responseType = 'arraybuffer';
				request.onload = function() {
					ctx.decodeAudioData(request.response, onload, onerror);
				};
				request.send();
			}
		}

		function createAudioContext() {
			return new (window.AudioContext || window.webkitAudioContext)();
		}

	})();
})(MIDI);
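
/*
	Usage sketch (hypothetical): it assumes the surrounding MIDI.js core has already
	populated MIDI.channels, MIDI.keyToNote, MIDI.GM.byName and a MIDI.Soundfont map
	of note names to audio URLs or base64 data URIs before connect() is called;
	none of that is defined in this file. Delays are in seconds.

		MIDI.WebAudio.connect({
			onsuccess: function() {
				MIDI.WebAudio.programChange(0, 0);    // channel 0 -> program 0
				MIDI.WebAudio.setVolume(0, 127);      // master volume (channel id is ignored here)
				MIDI.WebAudio.noteOn(0, 60, 127, 0);  // MIDI note 60, velocity 127, play now
				MIDI.WebAudio.noteOff(0, 60, 0.75);   // schedule the release 0.75 s from now
			}
		});
*/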