Skip to content
This repository was archived by the owner on Feb 18, 2022. It is now read-only.

[WIP] record audio to file #28

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
127 changes: 76 additions & 51 deletions demo/demo.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import {
Sequencer,
Sampler,
Synth,
Recorder
} from '../src';

import Polysynth from './polysynth';
Expand All @@ -19,6 +20,7 @@ export default class Demo extends Component {

this.state = {
playing: true,
downloadLinkVisible: false,
};

this.handleAudioProcess = this.handleAudioProcess.bind(this);
Expand All @@ -32,68 +34,91 @@ export default class Demo extends Component {
playing: !this.state.playing,
});
}

handleRecordStop(blob, fileName) {
this.setState({
downloadLinkVisible: true,
}, () => {
const url = URL.createObjectURL(blob);
const anchor = this.refs.downloadLink;
anchor.href = url;
anchor.download = new Date().toISOString() + '.wav';
});
}
renderDownloadLink() {
if (!this.state.downloadLinkVisible) {
return null;
}

return (
<a ref="downloadLink" className="react-music-download-link">Download</a>
);
}
render() {
return (
<div>
<Song
playing={this.state.playing}
tempo={90}
>
<Analyser onAudioProcess={this.handleAudioProcess}>
<Sequencer
resolution={16}
bars={1}
>
<Sampler
sample="samples/kick.wav"
steps={[0, 2, 8, 10]}
/>
<Sampler
sample="samples/snare.wav"
steps={[4, 12]}
/>
</Sequencer>
<Sequencer
resolution={16}
bars={2}
>
<Polysynth
steps={[
[0, 1, ['c3', 'd#3', 'g3' ]],
[2, 1, ['c4']],
[8, 1, ['c3', 'd#3', 'g3']],
[10, 1, ['c4']],
[12, 1, ['c3', 'd#3', 'g3']],
[14, 1, ['d#4']],
[16, 1, ['f3', 'g#3', 'c4']],
[18, 1, ['f3', 'g#3', 'c4']],
[24, 1, ['f3', 'g#3', 'c4']],
[26, 1, ['f3', 'g#3', 'c4']],
[28, 1, ['f3', 'g#3', 'c4']],
[30, 1, ['f3', 'g#3', 'c4']],
]}
/>
</Sequencer>
<Sequencer
resolution={16}
bars={2}
>
<Synth
type="sine"
steps={[
[0, 8, 'c2'],
[8, 4, 'c2'],
[12, 4, 'd#2'],
[16, 8, 'f2'],
[24, 8, 'f1'],
]}
/>
</Sequencer>
</Analyser>
<Recorder onRecordStop={this.handleRecordStop.bind(this)} isRecording={this.state.playing}>
<Analyser onAudioProcess={this.handleAudioProcess}>
<Sequencer
resolution={16}
bars={1}
>
<Sampler
sample="samples/kick.wav"
steps={[0, 2, 8, 10]}
/>
<Sampler
sample="samples/snare.wav"
steps={[4, 12]}
/>
</Sequencer>
<Sequencer
resolution={16}
bars={2}
>
<Polysynth
steps={[
[0, 1, ['c3', 'd#3', 'g3' ]],
[2, 1, ['c4']],
[8, 1, ['c3', 'd#3', 'g3']],
[10, 1, ['c4']],
[12, 1, ['c3', 'd#3', 'g3']],
[14, 1, ['d#4']],
[16, 1, ['f3', 'g#3', 'c4']],
[18, 1, ['f3', 'g#3', 'c4']],
[24, 1, ['f3', 'g#3', 'c4']],
[26, 1, ['f3', 'g#3', 'c4']],
[28, 1, ['f3', 'g#3', 'c4']],
[30, 1, ['f3', 'g#3', 'c4']],
]}
/>
</Sequencer>
<Sequencer
resolution={16}
bars={2}
>
<Synth
type="sine"
steps={[
[0, 8, 'c2'],
[8, 4, 'c2'],
[12, 4, 'd#2'],
[16, 8, 'f2'],
[24, 8, 'f1'],
]}
/>
</Sequencer>
</Analyser>
</Recorder>
</Song>

<Visualization ref={(c) => { this.visualization = c; }} />

{this.state.downloadLinkVisible && this.renderDownloadLink()}
<button
className="react-music-button"
type="button"
Expand Down
2 changes: 1 addition & 1 deletion demo/index.css
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,4 @@
.react-music-button:active {
box-shadow: none;
border: solid 1px #a7a7a7;
}
}
15 changes: 1 addition & 14 deletions demo/index.js
Original file line number Diff line number Diff line change
@@ -1,21 +1,8 @@
import React from 'react';
import ReactDOM from 'react-dom';
import Demo from './demo';
import { AppContainer } from 'react-hot-loader';

ReactDOM.render(
<AppContainer>
<Demo />
</AppContainer>,
<Demo />,
document.getElementById('root')
);

module.hot.accept('./demo', () => {
const NextDemo = require('./demo').default;
ReactDOM.render(
<AppContainer>
<NextDemo />
</AppContainer>,
document.getElementById('root')
);
});
3 changes: 1 addition & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
"umd"
],
"scripts": {
"start": "webpack-dev-server --hot --inline --port 3000 --config webpack.config.dev.js --content-base public/",
"start": "webpack-dev-server --inline --port 3000 --config webpack.config.dev.js --content-base public/",
"build": "babel src -d lib --copy-files",
"clean": "rimraf dist",
"clean-umd": "rimraf umd",
Expand Down Expand Up @@ -52,7 +52,6 @@
"postcss-loader": "^0.10.1",
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-hot-loader": "^3.0.0-beta.2",
"rimraf": "^2.5.4",
"style-loader": "^0.13.1",
"webpack": "^1.13.1",
Expand Down
89 changes: 89 additions & 0 deletions src/components/recorder.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
// @flow
import React, { PropTypes, Component } from 'react';
import { getWAV, interleave, mergeBuffers } from '../utils/recorder';

type Props = {
children?: any;
onRecordStop?: Function;
smoothingTimeConstant?: number;
isRecording?: boolean
};

type Context = {
audioContext: Object;
connectNode: Object;
};

export default class Recorder extends Component {
context: Context;
props: Props;
static propTypes = {
children: PropTypes.node,
isRecording: PropTypes.bool,
onRecordStop: PropTypes.func,
};
static contextTypes = {
audioContext: PropTypes.object,
connectNode: PropTypes.object,
};
static childContextTypes = {
audioContext: PropTypes.object,
connectNode: PropTypes.object,
};
constructor(props: Props, context: Context) {
super(props);

const bufferSize = 2048;
this.isRecording = props.isRecording;
this.processor = context.audioContext.createScriptProcessor(bufferSize, 2, 2);
this.processor.recordingLength = 0;
this.processor.leftChannel = [];
this.processor.rightChannel = [];
this.onRecordStop = props.onRecordStop;

this.processor.onaudioprocess = (e) => {
const leftIn = e.inputBuffer.getChannelData(0);
const rightIn = e.inputBuffer.getChannelData(1);
// we clone the samples
this.processor.leftChannel.push(new Float32Array(leftIn));
this.processor.rightChannel.push(new Float32Array(rightIn));
this.processor.recordingLength += bufferSize;

// propagate samples to output
const leftOut = e.outputBuffer.getChannelData(0);
const rightOut = e.outputBuffer.getChannelData(1);
for (var i = 0; i < leftIn.length; i++) {
leftOut[i] = leftIn[i];
rightOut[i] = rightIn[i];
}
};

this.processor.stop = () => {
this.isRecording = false;
const left = mergeBuffers(this.processor.leftChannel, this.processor.recordingLength);
const right = mergeBuffers(this.processor.rightChannel, this.processor.recordingLength);
const interleavedChannels = interleave(left, right);
const blob = getWAV(interleavedChannels, context.audioContext.sampleRate);

this.processor.disconnect();
this.onRecordStop(blob);
};

this.connectNode = this.processor;
this.connectNode.connect(context.connectNode);
}
getChildContext(): Object {
return {
...this.context,
connectNode: this.connectNode,
};
}
componentWillReceiveProps(nextProps: Props) {
if (this.props.isRecording && !nextProps.isRecording) {
this.processor.stop();
}
}
render(): React.Element<any> {
return <span>{this.props.children}</span>;
}
}
2 changes: 1 addition & 1 deletion src/components/song.js
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,6 @@ export default class Song extends Component {
};
constructor(props: Props) {
super(props);

this.state = {
buffersLoaded: false,
};
Expand Down Expand Up @@ -77,6 +76,7 @@ export default class Song extends Component {
scheduler: this.scheduler,
};
}

componentDidMount() {
if (Object.keys(this.buffers).length === 0) {
this.setState({
Expand Down
2 changes: 2 additions & 0 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import Sequencer from './components/sequencer.js';
import Sampler from './components/sampler.js';
import Song from './components/song.js';
import Synth from './components/synth.js';
import Recorder from './components/recorder.js';

export {
Analyser,
Expand All @@ -36,4 +37,5 @@ export {
Sampler,
Song,
Synth,
Recorder,
};
73 changes: 73 additions & 0 deletions src/utils/recorder.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
// Much inspired by http://typedarray.org/from-microphone-to-wav-with-getusermedia-and-web-audio

// Flattens an array of Float32Array chunks into one Float32Array of
// `recordingLength` samples, preserving chunk order.
export const mergeBuffers = function (channelBuffer, recordingLength) {
  const merged = new Float32Array(recordingLength);
  let writePos = 0;
  channelBuffer.forEach((chunk) => {
    merged.set(chunk, writePos);
    writePos += chunk.length;
  });
  return merged;
};

// Combines two equal-length channel buffers into a single buffer with
// samples alternating L, R, L, R, ... (standard stereo PCM interleaving).
export const interleave = function (leftChannel, rightChannel) {
  const total = leftChannel.length + rightChannel.length;
  const interleaved = new Float32Array(total);

  for (let frame = 0; frame * 2 < total; frame++) {
    interleaved[frame * 2] = leftChannel[frame];
    interleaved[frame * 2 + 1] = rightChannel[frame];
  }
  return interleaved;
};

// Writes each character of `string` into `view` as a single byte starting
// at `offset` (used for the ASCII chunk ids in the WAV header).
const writeUTFBytes = function (view, offset, string) {
  const lng = string.length;
  for (let i = 0; i < lng; i++) {
    view.setUint8(offset + i, string.charCodeAt(i));
  }
};

// Disabling since it's a highly specialised function
/* eslint-disable max-statements */
// Encodes interleaved [-1, 1] float samples as a 16-bit stereo PCM WAV Blob
// at the given sample rate.
// Spec: https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
export const getWAV = function (interleaved, sampleRate) {
  const dataLength = interleaved.length * 2; // 16-bit samples = 2 bytes each

  // create the buffer and view to create the .WAV file
  const buffer = new ArrayBuffer(44 + dataLength);
  const view = new DataView(buffer);

  // RIFF chunk descriptor
  writeUTFBytes(view, 0, 'RIFF');
  // ChunkSize = total file size - 8 = 36 + data size (was wrongly 44 + data,
  // which overstates the file length by 8 bytes and breaks strict parsers)
  view.setUint32(4, 36 + dataLength, true);
  writeUTFBytes(view, 8, 'WAVE');
  // FMT sub-chunk
  writeUTFBytes(view, 12, 'fmt ');
  view.setUint32(16, 16, true);             // fmt sub-chunk size
  view.setUint16(20, 1, true);              // audio format: 1 = PCM
  // stereo (2 channels)
  view.setUint16(22, 2, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * 4, true); // byte rate = rate * block align
  view.setUint16(32, 4, true);              // block align = channels * 2 bytes
  view.setUint16(34, 16, true);             // bits per sample
  // data sub-chunk
  writeUTFBytes(view, 36, 'data');
  view.setUint32(40, dataLength, true);

  // write the PCM samples, clamping to [-1, 1] first so out-of-range floats
  // don't wrap around when truncated to 16 bits
  const lng = interleaved.length;
  const volume = 1;
  let index = 44;
  for (let i = 0; i < lng; i++) {
    const sample = Math.max(-1, Math.min(1, interleaved[i]));
    view.setInt16(index, sample * (0x7FFF * volume), true);
    index += 2;
  }

  // our final binary blob that we can hand off
  return new Blob([view], { type: 'audio/wav' });
};
Loading