Welcome to the Java Programming Forums


The professional, friendly Java community. 21,500 members and growing!


The Java Programming Forums are a community of Java programmers from all around the World. Our members have a wide range of skills and they all have one thing in common: A passion to learn and code Java. We invite beginner Java programmers right through to Java professionals to post here and share your knowledge. Become a part of the community, help others, expand your knowledge of Java and enjoy talking with like minded people. Registration is quick and best of all free. We look forward to meeting you.


>> REGISTER NOW TO START POSTING


Members have full access to the forums. Advertisements are removed for registered users.

Results 1 to 5 of 5

Thread: taking sonar/frequency readings with a microphone to be able to display readout

  1. #1
    Junior Member
    Join Date
    Dec 2013
    Posts
    6
    Thanks
    0
    Thanked 0 Times in 0 Posts

    Default taking sonar/frequency readings with a microphone to be able to display readout

    I have a simple example below of how to access the microphone, record, and download a file. What I want to do is take this code, gain access to the microphone, and then use it to pick up different frequency readings somehow. Can anyone help me? I am very new and really do not understand any of the tutorials I have read about FFT or anything further into Java audio.



    // variables
    // Shared mutable recording state: written by the keyboard handler below
    // and by the onaudioprocess callback installed in success().
    var leftchannel = [];        // recorded Float32Array chunks, left channel
    var rightchannel = [];       // recorded Float32Array chunks, right channel
    var recorder = null;         // script-processor node that captures samples
    var recording = false;       // true between the R (start) and S (stop) keys
    var recordingLength = 0;     // total sample-frames captured so far
    var volume = null;           // gain node the mic stream is routed through
    var audioInput = null;       // media-stream source node for the microphone
    var sampleRate = 44100;      // NOTE(review): assumed rate written into the WAV
                                 // header; the real context rate may differ —
                                 // confirm against context.sampleRate
    var audioContext = null;     // AudioContext constructor (vendor-resolved)
    var context = null;          // live AudioContext instance
    var outputElement = document.getElementById('output');  // status display
    var outputString;
     
    // feature detection 
    // Map the vendor-prefixed getUserMedia implementations onto the standard
    // name before using it.
    if (!navigator.getUserMedia)
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                      navigator.mozGetUserMedia || navigator.msGetUserMedia;
     
    // Ask for microphone access: success() wires up the audio graph; the
    // error callback (or the else branch) alerts the user instead.
    if (navigator.getUserMedia){
        navigator.getUserMedia({audio:true}, success, function(e) {
        alert('Error capturing audio.');
        });
    } else alert('getUserMedia not supported in this browser.');
     
    // Keyboard controls for the recorder:
    //   R (82) starts capturing; S (83) stops, packages the captured samples
    //   as a 16-bit stereo PCM WAV and triggers a download of 'output.wav'.
    window.onkeydown = function(e){
     
        // if R is pressed, we start recording
        if ( e.keyCode == 82 ){
            recording = true;
            // reset the buffers for the new recording
            leftchannel.length = rightchannel.length = 0;
            recordingLength = 0;
            outputElement.innerHTML = 'Recording now...';
        // if S is pressed, we stop the recording and package the WAV file
        } else if ( e.keyCode == 83 ){
     
            // we stop recording
            recording = false;
     
            outputElement.innerHTML = 'Building wav file...';
     
            // flatten the per-callback chunks of each channel into one buffer
            var leftBuffer = mergeBuffers ( leftchannel, recordingLength );
            var rightBuffer = mergeBuffers ( rightchannel, recordingLength );
            // we interleave both channels together (L R L R ...)
            var interleaved = interleave ( leftBuffer, rightBuffer );
     
            // allocate the WAV container: 44-byte header + 2 bytes per sample
            var buffer = new ArrayBuffer(44 + interleaved.length * 2);
            var view = new DataView(buffer);
     
            // RIFF chunk descriptor
            writeUTFBytes(view, 0, 'RIFF');
            view.setUint32(4, 44 + interleaved.length * 2, true);
            writeUTFBytes(view, 8, 'WAVE');
            // FMT sub-chunk: PCM (format 1), 2 channels, 16 bits per sample
            writeUTFBytes(view, 12, 'fmt ');
            view.setUint32(16, 16, true);
            view.setUint16(20, 1, true);
            // stereo (2 channels)
            view.setUint16(22, 2, true);
            view.setUint32(24, sampleRate, true);
            view.setUint32(28, sampleRate * 4, true);  // byte rate = sampleRate * block align
            view.setUint16(32, 4, true);               // block align = channels * bytes/sample
            view.setUint16(34, 16, true);              // bits per sample
            // data sub-chunk
            writeUTFBytes(view, 36, 'data');
            view.setUint32(40, interleaved.length * 2, true);
     
            // write the PCM samples, scaling [-1, 1] floats to signed 16-bit
            var lng = interleaved.length;
            var index = 44;
            var volume = 1;   // local scale factor; intentionally shadows the gain node
            for (var i = 0; i < lng; i++){
                view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
                index += 2;
            }
     
            // our final binary blob
            var blob = new Blob ( [ view ], { type : 'audio/wav' } );
     
            // let's save it locally
            outputElement.innerHTML = 'Handing off the file now...';
            var url = (window.URL || window.webkitURL).createObjectURL(blob);
            var link = window.document.createElement('a');
            link.href = url;
            link.download = 'output.wav';
            // FIX: document.createEvent("Event")/initEvent are deprecated; the
            // supported way to trigger the download programmatically is click().
            link.click();
            // FIX: release the blob URL after handing off the download,
            // otherwise the blob stays alive for the lifetime of the page.
            (window.URL || window.webkitURL).revokeObjectURL(url);
        }
    }
     
    // Merges the two channel buffers into one Float32Array in alternating
    // L R L R ... order, as required for stereo PCM WAV sample data.
    function interleave(leftChannel, rightChannel){
      const total = leftChannel.length + rightChannel.length;
      const mixed = new Float32Array(total);
      let dst = 0;
      for (let src = 0; dst < total; src++){
        mixed[dst++] = leftChannel[src];
        mixed[dst++] = rightChannel[src];
      }
      return mixed;
    }
     
    // Concatenates an array of Float32Array chunks into a single
    // Float32Array holding exactly `recordingLength` sample-frames.
    function mergeBuffers(channelBuffer, recordingLength){
      const merged = new Float32Array(recordingLength);
      let writePos = 0;
      for (const chunk of channelBuffer){
        merged.set(chunk, writePos);
        writePos += chunk.length;
      }
      return merged;
    }
     
    // Writes each UTF-16 code unit of `string` as one unsigned byte into
    // `view`, starting at `offset` (used for the ASCII tags in a WAV header).
    function writeUTFBytes(view, offset, string){ 
      Array.prototype.forEach.call(string, function(ch, i){
        view.setUint8(offset + i, ch.charCodeAt(0));
      });
    }
     
    // getUserMedia success callback: builds the capture graph
    //   mic stream -> gain node -> script processor -> destination
    // and installs the onaudioprocess hook that clones raw samples into
    // leftchannel/rightchannel while `recording` is true.
    // @param e - the MediaStream granted by getUserMedia
    function success(e){
        // creates the audio context (vendor-prefixed on older WebKit)
        audioContext = window.AudioContext || window.webkitAudioContext;
        context = new audioContext();
     
        // creates a gain node
        volume = context.createGain();
     
        // creates an audio node from the microphone incoming stream
        audioInput = context.createMediaStreamSource(e);
     
        // connect the stream to the gain node
        audioInput.connect(volume);
     
        /* From the spec: This value controls how frequently the audioprocess event is 
        dispatched and how many sample-frames need to be processed each call. 
        Lower values for buffer size will result in a lower (better) latency. 
        Higher values will be necessary to avoid audio breakup and glitches */
        var bufferSize = 2048;
        // FIX: createJavaScriptNode was renamed to createScriptProcessor in the
        // Web Audio spec; prefer the standard name and keep the legacy one as a
        // fallback for very old browsers.
        recorder = context.createScriptProcessor
            ? context.createScriptProcessor(bufferSize, 2, 2)
            : context.createJavaScriptNode(bufferSize, 2, 2);
     
        recorder.onaudioprocess = function(e){
            if (!recording) return;
            var left = e.inputBuffer.getChannelData (0);
            var right = e.inputBuffer.getChannelData (1);
            // we clone the samples: getChannelData returns a view whose
            // contents the audio engine reuses on the next callback
            leftchannel.push (new Float32Array (left));
            rightchannel.push (new Float32Array (right));
            recordingLength += bufferSize;
        }
     
        // we connect the recorder
        volume.connect (recorder);
        recorder.connect (context.destination); 
    }


  2. #2
    Super Moderator Norm's Avatar
    Join Date
    May 2010
    Location
    Eastern Florida
    Posts
    25,140
    Thanks
    65
    Thanked 2,720 Times in 2,670 Posts

    Default Re: taking sonar/frequency readings with a microphone to be able to display readout

    Please edit your post and wrap your code with code tags:
    [code=java]
    YOUR CODE HERE
    [/code]
    to get highlighting and preserve formatting.
    If you don't understand my answer, don't ignore it, ask a question.

  3. #3
    Super Moderator
    Join Date
    Jun 2013
    Location
    So. Maryland, USA
    Posts
    5,517
    My Mood
    Mellow
    Thanks
    215
    Thanked 698 Times in 680 Posts

    Default Re: taking sonar/frequency readings with a microphone to be able to display readout

    Guess you didn't read that Announcements topic I asked you to read.

  4. #4
    Super Moderator Norm's Avatar
    Join Date
    May 2010
    Location
    Eastern Florida
    Posts
    25,140
    Thanks
    65
    Thanked 2,720 Times in 2,670 Posts

    Default Re: taking sonar/frequency readings with a microphone to be able to display readout

    Another problem I just noticed, the posted code is not java. It looks like javascript.
    If you don't understand my answer, don't ignore it, ask a question.

  5. #5
    Junior Member
    Join Date
    Dec 2013
    Posts
    6
    Thanks
    0
    Thanked 0 Times in 0 Posts

    Default Re: taking sonar/frequency readings with a microphone to be able to display readout

    What is the difference Norm?

    --- Update ---

    Can I use JavaScript instead of Java to do this? Either way, the whole process seems complicated to me.

    --- Update ---

    Oh, I see now — it says JavaScript is something completely different. Well, can I use Java to make myself an app for phones? If not, what is Java?

Similar Threads

  1. Help with creating a java sonar amplet, microphone and speaker
    By shiftaction in forum Java Theory & Questions
    Replies: 4
    Last Post: December 20th, 2013, 01:00 PM
  2. Detecting sound from microphone using Java. Issue specific to Java Versions
    By ssbattousai in forum File I/O & Other I/O Streams
    Replies: 3
    Last Post: July 11th, 2013, 03:06 AM
  3. [SOLVED] frequency count/Big Oh
    By edvir in forum What's Wrong With My Code?
    Replies: 1
    Last Post: June 25th, 2012, 07:24 AM
  4. Filtering microphone sound
    By badoumba in forum Java Theory & Questions
    Replies: 0
    Last Post: April 28th, 2012, 08:02 AM