author    Chia-chi Yeh <chiachi@android.com>  2011-03-03 07:59:00 +0800
committer Chia-chi Yeh <chiachi@android.com>  2011-03-03 08:01:01 +0800
commit    c52f5b2ec5e13ab3d9ab016e6cab757d4ecb45c7 (patch)
tree      4e4d8639ad216e4edfce2852ec7f1c5eeb38e238 /voip
parent    0528bc77948e49bd75cfd4082fb8fef3a7238626 (diff)
RTP: update javadocs.
Change-Id: If600df0eb1e6135aed9f3b2eacfb6bc9ed5d78ff
Diffstat (limited to 'voip')
-rw-r--r--  voip/java/android/net/rtp/AudioGroup.java   86
-rw-r--r--  voip/java/android/net/rtp/AudioStream.java   4
2 files changed, 46 insertions, 44 deletions
diff --git a/voip/java/android/net/rtp/AudioGroup.java b/voip/java/android/net/rtp/AudioGroup.java
index a6b54d8..20c8969 100644
--- a/voip/java/android/net/rtp/AudioGroup.java
+++ b/voip/java/android/net/rtp/AudioGroup.java
@@ -16,41 +16,47 @@
package android.net.rtp;
+import android.media.AudioManager;
+
import java.util.HashMap;
import java.util.Map;
/**
- * An AudioGroup acts as a router connected to the speaker, the microphone, and
- * {@link AudioStream}s. Its execution loop consists of four steps. First, for
- * each AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, decodes its
- * incoming packets and stores in its buffer. Then, if the microphone is
- * enabled, processes the recorded audio and stores in its buffer. Third, if the
- * speaker is enabled, mixes and playbacks buffers of all AudioStreams. Finally,
- * for each AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, mixes all
- * other buffers and sends back the encoded packets. An AudioGroup does nothing
- * if there is no AudioStream in it.
+ * An AudioGroup is an audio hub for the speaker, the microphone, and
+ * {@link AudioStream}s. Each of these components can be logically turned on
+ * or off by calling {@link #setMode(int)} or {@link RtpStream#setMode(int)}.
+ * The AudioGroup will go through these components and process them one by one
+ * within its execution loop. The loop consists of four steps. First, for
+ * each AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, it decodes the
+ * incoming packets and stores them in the stream's buffer. Then, if the
+ * microphone is enabled, it processes the recorded audio and stores it in
+ * its own buffer. Third, if the speaker is enabled, it mixes all
+ * AudioStream buffers and plays them back. Finally, for each AudioStream
+ * not in {@link RtpStream#MODE_RECEIVE_ONLY}, it mixes all other buffers
+ * and sends back the encoded packets. An AudioGroup does nothing if there
+ * is no AudioStream in it.
*
 * <p>A few things must be noted before using these classes. The performance is
* highly related to the system load and the network bandwidth. Usually a
* simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
- * bandwidth, and vise versa. Using two AudioStreams at the same time not only
- * doubles the load but also the bandwidth. The condition varies from one device
- * to another, and developers must choose the right combination in order to get
- * the best result.
+ * bandwidth, and vice versa. Using two AudioStreams at the same time doubles
+ * not only the load but also the bandwidth. The condition varies from one
+ * device to another, and developers should choose the right combination in
+ * order to get the best result.</p>
*
* <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
* example, a Voice over IP (VoIP) application might want to put a conference
* call on hold in order to make a new call but still allow people in the
- * previous call to talk to each other. This can be done easily using two
+ * conference call to talk to each other. This can be done easily using two
* AudioGroups, but there are some limitations. Since the speaker and the
- * microphone are shared globally, only one AudioGroup is allowed to run in
- * modes other than {@link #MODE_ON_HOLD}. In addition, before adding an
- * AudioStream into an AudioGroup, one should always put all other AudioGroups
- * into {@link #MODE_ON_HOLD}. That will make sure the audio driver correctly
- * initialized.</p>
+ * microphone are globally shared resources, only one AudioGroup at a time is
+ * allowed to run in a mode other than {@link #MODE_ON_HOLD}. The others will
+ * be unable to acquire these resources and fail silently.</p>
*
* <p class="note">Using this class requires
- * {@link android.Manifest.permission#RECORD_AUDIO} permission.</p>
+ * {@link android.Manifest.permission#RECORD_AUDIO} permission. Developers
+ * should set the audio mode to {@link AudioManager#MODE_IN_COMMUNICATION}
+ * using {@link AudioManager#setMode(int)} and change it back when none of
+ * the AudioGroups is in use.</p>
*
* @see AudioStream
* @hide
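
The class comment above describes the full pipeline. A minimal sketch of the
setup it implies (the Context, local address, and peer address/port are
hypothetical placeholders, and error handling is omitted):

    // Sketch only: assumes a valid Context and a reachable RTP peer.
    void startCall(Context context, InetAddress local,
            InetAddress peer, int peerPort) throws SocketException {
        // Switch the audio mode first, as the note above recommends.
        AudioManager audioManager =
                (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);

        // One RTP stream; the AudioGroup below requires RECORD_AUDIO.
        AudioStream stream = new AudioStream(local);  // binds a local socket
        stream.associate(peer, peerPort);             // where packets go
        stream.setCodec(AudioCodec.PCMU);             // cheap CPU, more bandwidth
        stream.setMode(RtpStream.MODE_NORMAL);        // send and receive

        // Joining the group starts the execution loop described above.
        AudioGroup group = new AudioGroup();
        group.setMode(AudioGroup.MODE_NORMAL);
        stream.join(group);
    }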
@@ -58,13 +64,13 @@ import java.util.Map;
public class AudioGroup {
/**
* This mode is similar to {@link #MODE_NORMAL} except the speaker and
- * the microphone are disabled.
+ * the microphone are both disabled.
*/
public static final int MODE_ON_HOLD = 0;
/**
* This mode is similar to {@link #MODE_NORMAL} except the microphone is
- * muted.
+ * disabled.
*/
public static final int MODE_MUTED = 1;
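
Since only one AudioGroup at a time may run outside MODE_ON_HOLD, putting a
call on hold before answering another could look like this sketch (heldGroup
and activeGroup are hypothetical references to two existing groups):

    // Park the conference: its AudioStreams keep mixing with each other,
    // but the local speaker and microphone are released.
    heldGroup.setMode(AudioGroup.MODE_ON_HOLD);

    // The new call may now own the speaker and the microphone.
    activeGroup.setMode(AudioGroup.MODE_NORMAL);

    // Local mute: microphone off, playback still on.
    activeGroup.setMode(AudioGroup.MODE_MUTED);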
@@ -137,20 +143,18 @@ public class AudioGroup {
private native void nativeSetMode(int mode);
// Package-private method used by AudioStream.join().
-    void add(AudioStream stream, AudioCodec codec, int dtmfType) {
-        synchronized (this) {
-            if (!mStreams.containsKey(stream)) {
-                try {
-                    int socket = stream.dup();
-                    String codecSpec = String.format("%d %s %s", codec.type,
-                            codec.rtpmap, codec.fmtp);
-                    nativeAdd(stream.getMode(), socket,
-                            stream.getRemoteAddress().getHostAddress(),
-                            stream.getRemotePort(), codecSpec, dtmfType);
-                    mStreams.put(stream, socket);
-                } catch (NullPointerException e) {
-                    throw new IllegalStateException(e);
-                }
+    synchronized void add(AudioStream stream, AudioCodec codec, int dtmfType) {
+        if (!mStreams.containsKey(stream)) {
+            try {
+                int socket = stream.dup();
+                String codecSpec = String.format("%d %s %s", codec.type,
+                        codec.rtpmap, codec.fmtp);
+                nativeAdd(stream.getMode(), socket,
+                        stream.getRemoteAddress().getHostAddress(),
+                        stream.getRemotePort(), codecSpec, dtmfType);
+                mStreams.put(stream, socket);
+            } catch (NullPointerException e) {
+                throw new IllegalStateException(e);
             }
         }
     }
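
add() is package-private; applications reach it only through
AudioStream.join(). A sketch of that public path (the DTMF payload type 101
is a common choice but an assumption here):

    stream.setCodec(AudioCodec.GSM);  // codec.type, rtpmap, fmtp feed codecSpec
    stream.setDtmfType(101);          // RFC 2833 telephone-event payload type
    stream.join(group);               // join() ends up in add() and nativeAdd()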
@@ -159,12 +163,10 @@ public class AudioGroup {
int remotePort, String codecSpec, int dtmfType);
// Package-private method used by AudioStream.join().
-    void remove(AudioStream stream) {
-        synchronized (this) {
-            Integer socket = mStreams.remove(stream);
-            if (socket != null) {
-                nativeRemove(socket);
-            }
+    synchronized void remove(AudioStream stream) {
+        Integer socket = mStreams.remove(stream);
+        if (socket != null) {
+            nativeRemove(socket);
         }
     }
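
remove() likewise runs indirectly. The two public ways a stream leaves a
group, sketched:

    stream.join(null);  // detach this one stream, triggering remove() above
    group.clear();      // or drop every AudioStream in the group at once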
diff --git a/voip/java/android/net/rtp/AudioStream.java b/voip/java/android/net/rtp/AudioStream.java
index 0edae6b..b45cc5e 100644
--- a/voip/java/android/net/rtp/AudioStream.java
+++ b/voip/java/android/net/rtp/AudioStream.java
@@ -27,8 +27,8 @@ import java.net.SocketException;
 * configured {@link AudioCodec}. On the other side, an {@link AudioGroup}
* represents a local endpoint which mixes all the AudioStreams and optionally
* interacts with the speaker and the microphone at the same time. The simplest
- * usage includes one for each endpoints. For other combinations, users should
- * be aware of the limitations described in {@link AudioGroup}.
+ * usage includes one for each endpoint. For other combinations, developers
+ * should be aware of the limitations described in {@link AudioGroup}.
*
* <p>An AudioStream becomes busy when it joins an AudioGroup. In this case most
* of the setter methods are disabled. This is designed to ease the task of
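
Because a busy AudioStream rejects most setters, all configuration has to
happen before joining. A sketch of the resulting call order (the
IllegalStateException is what the busy checks throw):

    stream.setCodec(AudioCodec.AMR);        // allowed: not busy yet
    stream.setMode(RtpStream.MODE_NORMAL);  // allowed: not busy yet
    stream.join(group);                     // stream is now busy
    try {
        stream.setCodec(AudioCodec.PCMU);   // rejected while busy
    } catch (IllegalStateException e) {
        stream.join(null);                  // leave the group first,
        stream.setCodec(AudioCodec.PCMU);   // then reconfigure,
        stream.join(group);                 // and rejoin
    }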