RTP: Extend codec capability and update the APIs.

Change-Id: I37ba9d83c2de3c5dae2bfc1b7513df7f6fee3c5c
diff --git a/voip/java/android/net/rtp/AudioCodec.java b/voip/java/android/net/rtp/AudioCodec.java
index 89e6aa9..4851a46 100644
--- a/voip/java/android/net/rtp/AudioCodec.java
+++ b/voip/java/android/net/rtp/AudioCodec.java
@@ -16,41 +16,133 @@
 
 package android.net.rtp;
 
-/** @hide */
+import java.util.Arrays;
+
+/**
+ * This class defines a collection of audio codecs to be used with
+ * {@link AudioStream}s. Their parameters are designed to be exchanged using
+ * Session Description Protocol (SDP). Most of the values listed here can be
+ * found in RFC 3551, while others are described in separate standards.
+ *
+ * <p>A few simple configurations are defined as public static instances for
+ * convenient direct use. More complicated ones can be obtained using
+ * {@link #getCodec(int, String, String)}. For example, one can use the
+ * following snippet to create a mode-1-only AMR codec.</p>
+ * <pre>
+ * AudioCodec codec = AudioCodec.getCodec(100, "AMR/8000", "mode-set=1");
+ * </pre>
+ *
+ * @see AudioStream
+ * @hide
+ */
 public class AudioCodec {
-    public static final AudioCodec ULAW = new AudioCodec("PCMU", 8000, 160, 0);
-    public static final AudioCodec ALAW = new AudioCodec("PCMA", 8000, 160, 8);
+    /**
+     * The RTP payload type of the encoding.
+     */
+    public final int type;
 
     /**
-     * Returns system supported codecs.
+     * The encoding parameters to be used in the corresponding SDP attribute.
      */
-    public static AudioCodec[] getSystemSupportedCodecs() {
-        return new AudioCodec[] {AudioCodec.ULAW, AudioCodec.ALAW};
+    public final String rtpmap;
+
+    /**
+     * The format parameters to be used in the corresponding SDP attribute.
+     */
+    public final String fmtp;
+
+    /**
+     * G.711 u-law audio codec.
+     */
+    public static final AudioCodec PCMU = new AudioCodec(0, "PCMU/8000", null);
+
+    /**
+     * G.711 a-law audio codec.
+     */
+    public static final AudioCodec PCMA = new AudioCodec(8, "PCMA/8000", null);
+
+    /**
+     * GSM Full-Rate audio codec, also known as GSM-FR, GSM 06.10, GSM, or
+     * simply FR.
+     */
+    public static final AudioCodec GSM = new AudioCodec(3, "GSM/8000", null);
+
+    /**
+     * GSM Enhanced Full-Rate audio codec, also known as GSM-EFR, GSM 06.60, or
+     * simply EFR.
+     */
+    public static final AudioCodec GSM_EFR = new AudioCodec(96, "GSM-EFR/8000", null);
+
+    /**
+     * Adaptive Multi-Rate narrowband audio codec, also known as AMR or AMR-NB.
+     * Currently CRC, robust sorting, and interleaving are not supported. See
+     * more details about these features in RFC 4867.
+     */
+    public static final AudioCodec AMR = new AudioCodec(97, "AMR/8000", null);
+
+    // TODO: add the rest of the codecs when the native part is done.
+    private static final AudioCodec[] sCodecs = {PCMU, PCMA};
+
+    private AudioCodec(int type, String rtpmap, String fmtp) {
+        this.type = type;
+        this.rtpmap = rtpmap;
+        this.fmtp = fmtp;
     }
 
     /**
-     * Returns the codec instance if it is supported by the system.
+     * Returns system supported audio codecs.
+     */
+    public static AudioCodec[] getCodecs() {
+        return Arrays.copyOf(sCodecs, sCodecs.length);
+    }
+
+    /**
+     * Creates an AudioCodec according to the given configuration.
      *
-     * @param name name of the codec
-     * @return the matched codec or null if the codec name is not supported by
-     *      the system
+     * @param type The payload type of the encoding defined in RTP/AVP.
+     * @param rtpmap The encoding parameters specified in the corresponding SDP
+     *     attribute, or null if it is not available.
+     * @param fmtp The format parameters specified in the corresponding SDP
+     *     attribute, or null if it is not available.
+     * @return The configured AudioCodec or {@code null} if it is not supported.
      */
-    public static AudioCodec getSystemSupportedCodec(String name) {
-        for (AudioCodec codec : getSystemSupportedCodecs()) {
-            if (codec.name.equals(name)) return codec;
+    public static AudioCodec getCodec(int type, String rtpmap, String fmtp) {
+        if (type < 0 || type > 127) {
+            return null;
         }
-        return null;
-    }
 
-    public final String name;
-    public final int sampleRate;
-    public final int sampleCount;
-    public final int defaultType;
+        AudioCodec hint = null;
+        if (rtpmap != null) {
+            String clue = rtpmap.trim().toUpperCase();
+            for (AudioCodec codec : sCodecs) {
+                if (clue.startsWith(codec.rtpmap)) {
+                    String channels = clue.substring(codec.rtpmap.length());
+                    if (channels.length() == 0 || channels.equals("/1")) {
+                        hint = codec;
+                    }
+                    break;
+                }
+            }
+        } else if (type < 96) {
+            for (AudioCodec codec : sCodecs) {
+                if (type == codec.type) {
+                    hint = codec;
+                    rtpmap = codec.rtpmap;
+                    break;
+                }
+            }
+        }
 
-    private AudioCodec(String name, int sampleRate, int sampleCount, int defaultType) {
-        this.name = name;
-        this.sampleRate = sampleRate;
-        this.sampleCount = sampleCount;
-        this.defaultType = defaultType;
+        if (hint == null) {
+            return null;
+        }
+        if (hint == AMR && fmtp != null) {
+            String clue = fmtp.toLowerCase();
+            if (clue.contains("crc=1") || clue.contains("robust-sorting=1") ||
+                    clue.contains("interleaving=")) {
+                return null;
+            }
+        }
+        return new AudioCodec(type, rtpmap, fmtp);
     }
 }
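
For illustration, a minimal usage sketch of the reworked AudioCodec API (not part of the
patch). Payload type 96 is an arbitrary dynamic type chosen for the example; per the TODO
above, only PCMU and PCMA are registered in sCodecs so far, so requests for the other
codecs still return null:

    // Predefined configuration with a static payload type.
    AudioCodec ulaw = AudioCodec.PCMU;                 // type 0, "PCMU/8000", no fmtp

    // Custom configuration built from SDP-style parameters; getCodec()
    // returns null when the combination is not supported.
    AudioCodec alaw = AudioCodec.getCodec(96, "PCMA/8000", null);

    // Enumerate the codecs the system currently supports.
    for (AudioCodec codec : AudioCodec.getCodecs()) {
        System.out.println(codec.type + " " + codec.rtpmap + " " + codec.fmtp);
    }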
diff --git a/voip/java/android/net/rtp/AudioGroup.java b/voip/java/android/net/rtp/AudioGroup.java
index 37cc121..43a3827 100644
--- a/voip/java/android/net/rtp/AudioGroup.java
+++ b/voip/java/android/net/rtp/AudioGroup.java
@@ -20,13 +20,63 @@
 import java.util.Map;
 
 /**
+ * An AudioGroup acts as a router connected to the speaker, the microphone, and
+ * {@link AudioStream}s. Its pipeline has four steps. First, for each
+ * AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, it decodes the incoming
+ * packets and stores them in the stream's buffer. Second, if the microphone is
+ * enabled, it processes the recorded audio into its own buffer. Third, if the
+ * speaker is enabled, it mixes and plays back the buffers of all AudioStreams.
+ * Finally, for each AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, it
+ * mixes all the other buffers and sends back the encoded packets. An AudioGroup
+ * does nothing if there is no AudioStream in it.
+ *
+ * <p>A few things should be noted before using these classes. Performance
+ * depends heavily on the system load and the network bandwidth. Usually a
+ * simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
+ * bandwidth, and vice versa. Using two AudioStreams at the same time doubles
+ * not only the load but also the bandwidth. The conditions vary from one device
+ * to another, and developers must choose the right combination in order to get
+ * the best result.
+ *
+ * <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
+ * example, a Voice over IP (VoIP) application might want to put a conference
+ * call on hold in order to make a new call but still allow people in the
+ * previous call to talk to each other. This can be done easily using two
+ * AudioGroups, but there are some limitations. Since the speaker and the
+ * microphone are shared globally, only one AudioGroup is allowed to run in
+ * modes other than {@link #MODE_ON_HOLD}. In addition, before adding an
+ * AudioStream into an AudioGroup, one should always put all other AudioGroups
+ * into {@link #MODE_ON_HOLD}. This makes sure the audio driver is correctly
+ * initialized.
+ * @hide
  */
-/** @hide */
 public class AudioGroup {
+    /**
+     * This mode is similar to {@link #MODE_NORMAL} except the speaker and
+     * the microphone are disabled.
+     */
     public static final int MODE_ON_HOLD = 0;
+
+    /**
+     * This mode is similar to {@link #MODE_NORMAL} except the microphone is
+     * muted.
+     */
     public static final int MODE_MUTED = 1;
+
+    /**
+     * This mode indicates that the speaker, the microphone, and all
+     * {@link AudioStream}s in the group are enabled. First, the packets
+     * received from the streams are decoded and mixed with the audio recorded
+     * from the microphone. Then, the results are played back to the speaker,
+     * encoded and sent back to each stream.
+     */
     public static final int MODE_NORMAL = 2;
-    public static final int MODE_EC_ENABLED = 3;
+
+    /**
+     * This mode is similar to {@link #MODE_NORMAL} except that echo suppression
+     * is enabled. It should only be used when the speakerphone is on.
+     */
+    public static final int MODE_ECHO_SUPPRESSION = 3;
 
     private final Map<AudioStream, Integer> mStreams;
     private int mMode = MODE_ON_HOLD;
@@ -36,23 +86,42 @@
         System.loadLibrary("rtp_jni");
     }
 
+    /**
+     * Creates an empty AudioGroup.
+     */
     public AudioGroup() {
         mStreams = new HashMap<AudioStream, Integer>();
     }
 
+    /**
+     * Returns the current mode.
+     */
     public int getMode() {
         return mMode;
     }
 
+    /**
+     * Changes the current mode. It must be one of {@link #MODE_ON_HOLD},
+     * {@link #MODE_MUTED}, {@link #MODE_NORMAL}, or
+     * {@link #MODE_ECHO_SUPPRESSION}.
+     *
+     * @param mode The mode to change to.
+     * @throws IllegalArgumentException if the mode is invalid.
+     */
     public synchronized native void setMode(int mode);
 
-    synchronized void add(AudioStream stream, AudioCodec codec, int codecType, int dtmfType) {
+    private native void add(int mode, int socket, String remoteAddress,
+            int remotePort, String codecSpec, int dtmfType);
+
+    synchronized void add(AudioStream stream, AudioCodec codec, int dtmfType) {
         if (!mStreams.containsKey(stream)) {
             try {
                 int socket = stream.dup();
+                String codecSpec = String.format("%d %s %s", codec.type,
+                        codec.rtpmap, codec.fmtp);
                 add(stream.getMode(), socket,
-                        stream.getRemoteAddress().getHostAddress(), stream.getRemotePort(),
-                        codec.name, codec.sampleRate, codec.sampleCount, codecType, dtmfType);
+                        stream.getRemoteAddress().getHostAddress(),
+                        stream.getRemotePort(), codecSpec, dtmfType);
                 mStreams.put(stream, socket);
             } catch (NullPointerException e) {
                 throw new IllegalStateException(e);
@@ -60,8 +129,7 @@
         }
     }
 
-    private native void add(int mode, int socket, String remoteAddress, int remotePort,
-            String codecName, int sampleRate, int sampleCount, int codecType, int dtmfType);
+    private native void remove(int socket);
 
     synchronized void remove(AudioStream stream) {
         Integer socket = mStreams.remove(stream);
@@ -70,8 +138,6 @@
         }
     }
 
-    private native void remove(int socket);
-
     /**
      * Sends a DTMF digit to every {@link AudioStream} in this group. Currently
      * only event {@code 0} to {@code 15} are supported.
@@ -80,13 +146,16 @@
      */
     public native synchronized void sendDtmf(int event);
 
-    public synchronized void reset() {
+    /**
+     * Removes every {@link AudioStream} in this group.
+     */
+    public synchronized void clear() {
         remove(-1);
     }
 
     @Override
     protected void finalize() throws Throwable {
-        reset();
+        clear();
         super.finalize();
     }
 }
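
Similarly, a rough sketch of the AudioGroup lifecycle as documented above (stream setup is
elided; see the AudioStream sketch further down):

    AudioGroup group = new AudioGroup();             // new groups start in MODE_ON_HOLD
    group.setMode(AudioGroup.MODE_NORMAL);           // enable the speaker and the microphone
    // ... AudioStreams join the group via AudioStream.join(group) ...
    group.sendDtmf(0);                               // send DTMF event 0 to every stream
    group.setMode(AudioGroup.MODE_ECHO_SUPPRESSION); // when the speakerphone is on
    group.clear();                                   // remove every stream (formerly reset())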
diff --git a/voip/java/android/net/rtp/AudioStream.java b/voip/java/android/net/rtp/AudioStream.java
index a955fd2..908aada 100644
--- a/voip/java/android/net/rtp/AudioStream.java
+++ b/voip/java/android/net/rtp/AudioStream.java
@@ -20,12 +20,27 @@
 import java.net.SocketException;
 
 /**
- * AudioStream represents a RTP stream carrying audio payloads.
+ * An AudioStream is a {@link RtpStream} which carries audio payloads over
+ * Real-time Transport Protocol (RTP). Two different classes are developed in
+ * order to support various usages such as audio conferencing. An AudioStream
+ * represents a remote endpoint which consists of a network mapping and a
+ * configured {@link AudioCodec}. On the other side, an {@link AudioGroup}
+ * represents a local endpoint which mixes all the AudioStreams and optionally
+ * interacts with the speaker and the microphone at the same time. The simplest
+ * usage includes one for each endpoint. For other combinations, users should
+ * be aware of the limitations described in {@link AudioGroup}.
+ *
+ * <p>An AudioStream becomes busy when it joins an AudioGroup. In this case most
+ * of the setter methods are disabled. This is designed to ease the task of
+ * managing native resources. One can always make an AudioStream leave its
+ * AudioGroup by calling {@link #join(AudioGroup)} with {@code null} and put it
+ * back after the modification is done.
+ *
+ * @see AudioGroup
+ * @hide
  */
-/** @hide */
 public class AudioStream extends RtpStream {
     private AudioCodec mCodec;
-    private int mCodecType = -1;
     private int mDtmfType = -1;
     private AudioGroup mGroup;
 
@@ -42,7 +57,8 @@
     }
 
     /**
-     * Returns {@code true} if the stream already joined an {@link AudioGroup}.
+     * Returns {@code true} if the stream has already joined an
+     * {@link AudioGroup}.
      */
     @Override
     public final boolean isBusy() {
@@ -52,7 +68,7 @@
     /**
      * Returns the joined {@link AudioGroup}.
      */
-    public AudioGroup getAudioGroup() {
+    public AudioGroup getGroup() {
         return mGroup;
     }
 
@@ -74,35 +90,26 @@
             mGroup = null;
         }
         if (group != null) {
-            group.add(this, mCodec, mCodecType, mDtmfType);
+            group.add(this, mCodec, mDtmfType);
             mGroup = group;
         }
     }
 
     /**
-     * Sets the {@link AudioCodec} and its RTP payload type. According to RFC
-     * 3551, the type must be in the range of 0 and 127, where 96 and above are
-     * dynamic types. For codecs with static mappings (non-negative
-     * {@link AudioCodec#defaultType}), assigning a different non-dynamic type
-     * is disallowed.
+     * Sets the {@link AudioCodec}.
      *
      * @param codec The AudioCodec to be used.
-     * @param type The RTP payload type.
-     * @throws IllegalArgumentException if the type is invalid or used by DTMF.
+     * @throws IllegalArgumentException if its type is used by DTMF.
      * @throws IllegalStateException if the stream is busy.
      */
-    public void setCodec(AudioCodec codec, int type) {
+    public void setCodec(AudioCodec codec) {
         if (isBusy()) {
             throw new IllegalStateException("Busy");
         }
-        if (type < 0 || type > 127 || (type != codec.defaultType && type < 96)) {
-            throw new IllegalArgumentException("Invalid type");
-        }
-        if (type == mDtmfType) {
+        if (codec.type == mDtmfType) {
             throw new IllegalArgumentException("The type is used by DTMF");
         }
         mCodec = codec;
-        mCodecType = type;
     }
 
     /**
@@ -127,7 +134,7 @@
             if (type < 96 || type > 127) {
                 throw new IllegalArgumentException("Invalid type");
             }
-            if (type == mCodecType) {
+            if (type == mCodec.type) {
                 throw new IllegalArgumentException("The type is used by codec");
             }
         }
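
A hedged sketch of the new setCodec()/join() flow; localAddress, remoteAddress, remotePort,
and group are placeholders, and the AudioStream constructor and associate() are assumed
from the existing class since this patch does not touch them:

    AudioStream stream = new AudioStream(localAddress);
    stream.setCodec(AudioCodec.PCMU);            // the payload type now comes from the codec
    stream.associate(remoteAddress, remotePort);
    stream.join(group);                          // the stream becomes busy; setters are disabled

    // To change the codec later, leave the group first, then rejoin.
    stream.join(null);
    stream.setCodec(AudioCodec.PCMA);
    stream.join(group);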
diff --git a/voip/java/android/net/rtp/RtpStream.java b/voip/java/android/net/rtp/RtpStream.java
index ef5ca17..23fb258 100644
--- a/voip/java/android/net/rtp/RtpStream.java
+++ b/voip/java/android/net/rtp/RtpStream.java
@@ -22,13 +22,25 @@
 import java.net.SocketException;
 
 /**
- * RtpStream represents a base class of media streams running over
- * Real-time Transport Protocol (RTP).
+ * RtpStream is the base class of streams which send and receive network
+ * packets with media payloads over Real-time Transport Protocol (RTP).
+ * @hide
  */
-/** @hide */
 public class RtpStream {
+    /**
+     * This mode indicates that the stream sends and receives packets at the
+     * same time. This is the initial mode for new streams.
+     */
     public static final int MODE_NORMAL = 0;
+
+    /**
+     * This mode indicates that the stream only sends packets.
+     */
     public static final int MODE_SEND_ONLY = 1;
+
+    /**
+     * This mode indicates that the stream only receives packets.
+     */
     public static final int MODE_RECEIVE_ONLY = 2;
 
     private final InetAddress mLocalAddress;
@@ -89,15 +101,16 @@
     }
 
     /**
-     * Returns {@code true} if the stream is busy. This method is intended to be
-     * overridden by subclasses.
+     * Returns {@code true} if the stream is busy. In this case most of the
+     * setter methods are disabled. This method is intended to be overridden
+     * by subclasses.
      */
     public boolean isBusy() {
         return false;
     }
 
     /**
-     * Returns the current mode. The initial mode is {@link #MODE_NORMAL}.
+     * Returns the current mode.
      */
     public int getMode() {
         return mMode;
@@ -123,7 +136,8 @@
     }
 
     /**
-     * Associates with a remote host.
+     * Associates with a remote host. This defines the destination of the
+     * outgoing packets.
      *
      * @param address The network address of the remote host.
      * @param port The network port of the remote host.
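
Tying the classes together, a sketch of a simple two-way call with the updated API.
Addresses and ports are placeholders; as above, the AudioStream constructor taking a local
InetAddress is assumed from the existing code rather than from this change:

    InetAddress local = InetAddress.getByName("192.168.0.2");
    InetAddress remote = InetAddress.getByName("192.168.0.3");

    AudioStream stream = new AudioStream(local);
    stream.setCodec(AudioCodec.getCodec(96, "PCMA/8000", null));
    stream.associate(remote, 5004);              // destination of outgoing packets
    // MODE_NORMAL is the initial mode, so the stream both sends and receives.

    AudioGroup group = new AudioGroup();
    group.setMode(AudioGroup.MODE_NORMAL);
    stream.join(group);                          // start the audio pipeline

    // Put the call on hold later on.
    group.setMode(AudioGroup.MODE_ON_HOLD);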