Introduction
On this page you can find example usage of the android.media.AudioTrack write method.
Prototype
public int write(@NonNull ByteBuffer audioData, int sizeInBytes, @WriteMode int writeMode)
Source Link
Document
Writes the audio data to the audio sink for playback (streaming mode), or copies audio data for later playback (static buffer mode).
Usage
From source file:
Main.java
public static void addSound(AudioTrack track, int samplingRate, float freq) {
track.write(convertFreq2Bytes(freq, samplingRate), 0, samplingRate);
From source file:
net.reichholf.dreamdroid.fragment.SignalFragment.java
void playSound(double freqOfTone) {
double duration = 0.1; // seconds
int sampleRate = 8000; // a number
double dnumSamples = duration * sampleRate;
dnumSamples = Math.ceil(dnumSamples);
int numSamples = (int) dnumSamples;
double sample[] = new double[numSamples];
byte generatedSnd[] = new byte[2 * numSamples];
for (int i = 0; i < numSamples; ++i) { // Fill the sample array
sample[i] = Math.sin(freqOfTone * 2 * Math.PI * i / (sampleRate));
}//from ww w . j a v a 2 s .c o m
// convert to 16 bit pcm sound array
// assumes the sample buffer is normalized.
int idx = 0;
int i = 0;
int ramp = numSamples / 20; // Amplitude ramp as a percent of sample
// count
for (i = 0; i < numSamples; ++i) { // Ramp amplitude up (to avoid
// clicks)
if (i < ramp) {
double dVal = sample[i];
// Ramp up to maximum
final short val = (short) ((dVal * 32767 * i / ramp));
// in 16 bit wav PCM, first byte is the low order byte
generatedSnd[idx++] = (byte) (val & 0x00ff);
generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
} else if (i < numSamples - ramp) {
// Max amplitude for most of the samples
double dVal = sample[i];
// scale to maximum amplitude
final short val = (short) ((dVal * 32767));
// in 16 bit wav PCM, first byte is the low order byte
generatedSnd[idx++] = (byte) (val & 0x00ff);
generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
} else {
double dVal = sample[i];
// Ramp down to zero
final short val = (short) ((dVal * 32767 * (numSamples - i) / ramp));
// in 16 bit wav PCM, first byte is the low order byte
generatedSnd[idx++] = (byte) (val & 0x00ff);
generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
AudioTrack audioTrack = null; // Get audio track
try {
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT, (int) numSamples * 2, AudioTrack.MODE_STATIC);
// Load the track
audioTrack.write(generatedSnd, 0, generatedSnd.length);
audioTrack.play(); // Play the track
} catch (Exception e) {
int x = 0;
do { // Montior playback to find when done
if (audioTrack != null)
x = audioTrack.getPlaybackHeadPosition();
x = numSamples;
} while (x < numSamples);
if (audioTrack != null)
audioTrack.release(); // Track play done. Release track.
From source file:
zlyh.dmitry.recaller.threading.PlayBlockThread.java
@Override
public void run() {
AudioTrack audioTrack = null;
FileInputStream in = null;//from w ww. ja v a 2 s . c o m
try {
File rawpcm = new File(path);
if (!rawpcm.exists()) {
this.interrupt();
togglePlaying(true);
final int audioLength = (int) rawpcm.length();
final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency,
AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
AudioTrack.MODE_STREAM);
final int block = 256 * 1024;
byte[] byteData = new byte[block];
try {
in = new FileInputStream(rawpcm);
} catch (FileNotFoundException e) {
e.printStackTrace();
this.interrupt();
if (in != null) {
try {
int bytesread = 0;
int offset;
audioTrack.play();
while (bytesread < audioLength && !isInterrupted()) {
offset = in.read(byteData, 0, block);
if (offset != -1) {
audioTrack.write(byteData, 0, offset);
bytesread += offset;
} else {
break;
in.close();
togglePlaying(false);
if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) {
audioTrack.stop();
if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
audioTrack.release();
} catch (Exception e) {
e.printStackTrace();
try {
in.close();
} catch (IOException e1) {
e1.printStackTrace();
if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) {
audioTrack.stop();
if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
audioTrack.release();
togglePlaying(false);
} catch (Exception e) {
e.printStackTrace();
if (audioTrack != null) {
if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) {
audioTrack.stop();
if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
audioTrack.release();
if (in != null) {
try {
in.close();
} catch (IOException e1) {
e1.printStackTrace();
togglePlaying(false);