<?xml version="1.0" encoding="us-ascii"?>
<!DOCTYPE rfc PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/rfc2629.dtd"[
  <!ENTITY RFC2119 PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/reference.RFC.2119.xml">
  <!ENTITY RFC2629 PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/reference.RFC.2629.xml">
  <!ENTITY RFC3016 PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/reference.RFC.3016.xml">
  <!ENTITY RFC3550 PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/reference.RFC.3550.xml">
  <!ENTITY RFC3640 PUBLIC '' "http://xml.resource.org/public/rfc/bibxml/reference.RFC.3640.xml">
]>
<?xml-stylesheet type='text/xsl' href="http://greenbytes.de/tech/webdav/rfc2629xslt/rfc2629.xslt" ?>
<?rfc strict="yes" ?>
<?rfc toc="yes"?>
<!-- generate a ToC -->
<?rfc tocdepth="4"?>
<!-- the number of levels of subsections in ToC. default: 3 -->
<!-- control references -->
<?rfc symrefs="yes"?>
<!-- use symbolic reference tags, i.e., [RFC2119] instead of [1] -->
<?rfc sortrefs="yes" ?>
<!-- sort the reference entries alphabetically -->
<!-- control vertical white space 
     (using these PIs as follows is recommended by the RFC Editor) -->
<?rfc compact="yes" ?>
<!-- do not start each main section on a new page -->
<?rfc subcompact="no" ?>
<!-- keep one blank line between list items -->
<!-- end of list of popular I-D processing instructions -->
<rfc category="std"
     docName="draft-schmidt-avt-rfc3016bis-01.txt"
     ipr="trust200902"
     obsoletes=""
     updates="3016"
     submissionType="IETF"
     xml:lang="en">
  <!-- category values: std, bcp, info, exp, and historic
     ipr values: full3667, noModification3667, noDerivatives3667
     you can add the attributes updates="NNNN" and obsoletes="NNNN" 
     they will automatically be output with "(if approved)" -->
  <!-- ***** FRONT MATTER ***** -->
  <front>
    <!-- The abbreviated title is used in the page header - it is only necessary if the 
         full title is longer than 39 characters -->
    <title abbrev="RTP Payload Format for MPEG-4 Streams">RTP Payload Format for MPEG-4 Audio/Visual Streams</title>
    <!-- add 'role="editor"' below for the editors if appropriate -->
    <!-- Another author who claims to be an editor -->

    <author fullname="Malte Schmidt"
            initials="M.S."
            surname="Schmidt">
      <organization>Dolby Laboratories</organization>
      <address>
        <postal>
          <street>Deutschherrnstr. 15-19</street>
          <!-- Reorder these if your country does things differently -->
          <city>90537 Nuernberg</city>
          <region></region>
          <country>DE</country>
        </postal>
        <phone>+49 911 928 91 42</phone>
        <email>malte.schmidt@dolby.com</email>
        <!-- uri and facsimile elements may also be added -->
      </address>

    </author>
    <author fullname="Frans de Bont"
            initials="F.d.B."
            surname="de Bont">
      <organization>Philips Electronics</organization>
      <address>
        <postal>
          <street>High Tech Campus 5</street>
          <!-- Reorder these if your country does things differently -->
          <city>5656 AE Eindhoven</city>
          <region></region>
          <country>NL</country>
        </postal>
        <phone>+31 40 2740234</phone>
        <email>frans.de.bont@philips.com</email>
        <!-- uri and facsimile elements may also be added -->
      </address>
    </author>

    <author fullname="Stefan Doehla"
            initials="S.D."
            surname="Doehla">
      <organization>Fraunhofer IIS</organization>
      <address>
        <postal>
          <street>Am Wolfsmantel 33</street>
          <!-- Reorder these if your country does things differently -->
          <city>91058 Erlangen</city>
          <region></region>
          <country>DE</country>
        </postal>
        <phone>+49 9131 776 6042</phone>
        <email>stefan.doehla@iis.fraunhofer.de</email>
        <!-- uri and facsimile elements may also be added -->
      </address>
    </author>

  <!-- Removed the last five authors of the original list due to the limitation of at most five authors on this list
    <author fullname="Yoshihiro Kikuchi"
            initials="Y."
            surname="Kikuchi">
      <organization>Toshiba corporation</organization>
      <address>
        <postal>
          <street>1, Komukai Toshiba-cho, Saiwai-ku</street>
          <city>Kawasaki, 212-8582</city>
          <region></region>
          <country>JP</country>
        </postal>
        <email>yoshihiro.kikuchi@toshiba.co.jp</email>
      </address>
    </author>

    <author fullname="Yoshinori Matsui"
            initials="Y."
            surname="Matsui">
      <organization>Matsushita Electric Industrial Co., LTD.</organization>
      <address>
        <postal>
          <street>1006, Kadoma, Kadoma-shi, </street>
          <city>Osaka</city>
          <region></region>
          <country>JP</country>
        </postal>
        <email>matsui@drl.mei.co.jp</email>
      </address>
    </author>

    <author fullname="Toshiyuki Nomura"
            initials="T."
            surname="Nomura">
      <organization>NEC Corporation</organization>
      <address>
        <postal>
          <street>4-1-1, Miyazaki, Miyamae-ku</street>
          <city>Kawasaki</city>
          <region></region>
          <country>JP</country>
        </postal>
        <email>t-nomura@ccm.cl.nec.co.jp</email>
      </address>
    </author>

    <author fullname="Shigeru Fukunaga"
            initials="S."
            surname="Fukunaga">
      <organization>Oki Electric Industry Co., Ltd.</organization>
      <address>
        <postal>
          <street>1-2-27 Shiromi, Chuo-ku</street>
          <city>Osaka 540-6025</city>
          <region></region>
          <country>JP</country>
        </postal>
        <email>fukunaga444@oki.co.jp</email>
      </address>
    </author>

    <author fullname="Hideaki Kimata"
            initials="H."
            surname="Kimata">
      <organization>Nippon Telegraph and Telephone Corporation</organization>
      <address>
        <postal>
          <street>1-1, Hikari-no-oka, Yokosuka-shi</street>
          <city>Kanagawa</city>
          <region></region>
          <country>JP</country>
        </postal>
        <email>kimata@nttvdt.hil.ntt.co.jp</email>
      </address>
    </author>
 -->

    <date year="2009" />
    <!-- If the month and year are both specified and are the current ones, xml2rfc will fill 
         in the current day for you. If only the current year is specified, xml2rfc will fill 
	 in the current day and month for you. If the year is not the current one, it is 
	 necessary to specify at least a month (xml2rfc assumes day="1" if not specified for the 
	 purpose of calculating the expiry date).  With drafts it is normally sufficient to 
	 specify just the year. -->
    <!-- Meta-data Declarations -->
    <area>AVT</area>
    <workgroup>AVT</workgroup>
    <!-- WG name at the upperleft corner of the doc,
         IETF is fine for individual submissions.  
	 If this element is not present, the default is "Network Working Group",
         which is used by the RFC Editor as a nod to the history of the IETF. -->
    <keyword>RFC3016</keyword>
    <keyword>RTP</keyword>
    <keyword>MPEG-4</keyword>
    <keyword>Audio</keyword>
    <keyword>Visual</keyword>
    <keyword>AAC</keyword>
    <keyword>MPEG Surround</keyword>
    <!-- Keywords will be incorporated into HTML output
         files in a meta tag but they have no effect on text or nroff
         output. If you submit your draft to the RFC Editor, the
         keywords will be used for the search engine. -->
    <abstract>

      <t>
        This document describes Real-Time Transport Protocol (RTP) payload formats 
        for carrying each of MPEG-4 Audio and MPEG-4 Visual bitstreams without using MPEG-4
        Systems. For the purpose of directly mapping MPEG-4 Audio/Visual bitstreams onto RTP
        packets, it provides specifications for the use of RTP header fields and also specifies
        fragmentation rules. It also provides specifications for Multipurpose Internet Mail
        Extensions (MIME) type registrations and the use of Session Description Protocol (SDP).
      </t>
      <t>
        Comments are solicited and should be addressed to the working group's
        mailing list at avt@ietf.org and/or the author(s).
      </t>
    </abstract>
  </front>

  <!-- ----------------------------------------------------------------- -->

  <middle>
    <section title="Introduction"
             toc="default">
      <t>
        The RTP payload formats described in this document specify how MPEG-4 Audio
        <xref target="14496-3"></xref> and MPEG-4 Visual streams <xref target="14496-2"></xref>
        <xref target="14496-2/Amd.1"></xref> are to be fragmented and mapped directly
        onto RTP packets.
      </t>
      <t>
        These RTP payload formats enable transport of MPEG-4 Audio/Visual streams
        without using the synchronization and stream management functionality of
        MPEG-4 Systems <xref target="14496-1"></xref>.  Such RTP payload formats
        will be used in systems that
        have intrinsic stream management functionality and thus require no such
        functionality from MPEG-4 Systems.  H.323 terminals are an example of such
        systems, where MPEG-4 Audio/Visual streams are not managed by MPEG-4
        Systems Object Descriptors but by H.245.  The streams are directly mapped
        onto RTP packets without using MPEG-4 Systems Sync Layer.  Other examples
        are SIP and RTSP where MIME and SDP are used.  MIME types and SDP usages
        of the RTP payload formats described in this document are defined to
        directly specify the attribute of Audio/Visual streams (e.g., media type,
        packetization format and codec configuration) without using MPEG-4
        Systems.  The obvious benefit is that these MPEG-4 Audio/Visual RTP
        payload formats can be handled in a unified way together with those
        formats defined for non-MPEG-4 codecs.  The disadvantage is that
        interoperability with environments using MPEG-4 Systems may be difficult,
        other payload formats may be better suited to those applications.
      </t>
      <t>
        The semantics of RTP headers in such cases need to be clearly defined,
        including the association with MPEG-4 Audio/Visual data elements.  In
        addition, it is beneficial to define the fragmentation rules of RTP
        packets for MPEG-4 Video streams so as to enhance error resiliency by
        utilizing the error resilience tools provided inside the MPEG-4 Video
        stream.
      </t>
      <t>
        The RTP payload formats described in this document have been specified in
        RFC 3016 and are used by the 3GPP PSS service. However there are some
        misalignments between RFC 3016 and the 3GPP PSS specification that are
        addressed by this update:
        <list hangIndent="0"
              style="symbols">
          <t>The audio payload format (LATM) referenced in RFC 3016 is
               binary incompatible to the format used in 3GPP.</t>
          <t>The audio signalling format (StreamMuxConfig) referenced
               in RFC 3016 is binary incompatible to the format used in 3GPP.</t>
          <t>The audio parameter "SBR-enabled" is not defined within
               RFC 3016 but is used by 3GPP.</t>
          <t>The rate parameter specification is ambiguous in the
               presence of SBR (Spectral Band Replication).</t>
        </list>
        Furthermore some comments have been addressed and signalling support for
        MPEG surround <xref target="23003-1"></xref> was added. It should be noted that the audio payload format
        described here has some known limitations. For new system designs
        <xref target="RFC3640">RFC 3640</xref> is recommended.
      </t>

    <section title="MPEG-4 Visual RTP payload format"
             toc="default">
      <t>
        MPEG-4 Visual is a visual coding standard with many new features: high
        coding efficiency; high error resiliency; multiple, arbitrary shape
        object-based coding; etc. <xref target="14496-2"></xref>.  It covers
        a wide range of bitrates from
        scores of Kbps to several Mbps.  It also covers a wide variety of
        networks, ranging from those guaranteed to be almost error-free to mobile
        networks with high error rates.
      </t>
      <t>
        With respect to the fragmentation rules for an MPEG-4 Visual bitstream
        defined in this document, since MPEG-4 Visual is used for a wide variety
        of networks, it is desirable not to apply too much restriction on
        fragmentation, and a fragmentation rule such as "a single video packet
        shall always be mapped on a single RTP packet" may be inappropriate.  On
        the other hand, careless, media unaware fragmentation may cause
        degradation in error resiliency and bandwidth efficiency.  The
        fragmentation rules described in this document are flexible but manage to
        define the minimum rules for preventing meaningless fragmentation while
        utilizing the error resilience functionalities of MPEG-4 Visual.
      </t>
      <t>
        The fragmentation rule recommends not to map more than one VOP in an RTP
        packet so that the RTP timestamp uniquely indicates the VOP time framing.
        On the other hand, MPEG-4 video may generate VOPs of very small size, in
        cases with an empty VOP (vop_coded=0) containing only VOP header or an
        arbitrary shaped VOP with a small number of coding blocks.  To reduce the
        overhead for such cases, the fragmentation rule permits concatenating
        multiple VOPs in an RTP packet.  (See fragmentation rule (4) in section
        3.2 and marker bit and timestamp in section 3.1.)
      </t>
      <t>
        While the additional media specific RTP header defined for such video
        coding tools as H.261 or MPEG-1/2 is effective in helping to recover
        picture headers corrupted by packet losses, MPEG-4 Visual has already
        error resilience functionalities for recovering corrupt headers, and
        these can be used on RTP/IP networks as well as on other networks
        (H.223/mobile, MPEG-2/TS, etc.).  Therefore, no extra RTP header fields
        are defined in this MPEG-4 Visual RTP payload format.
      </t>
    </section>

    <section title="MPEG-4 Audio RTP payload format"
             toc="default">
      <t>
        MPEG-4 Audio is a new kind of audio standard that integrates many
        different types of audio coding tools.  Low-overhead MPEG-4 Audio
        Transport Multiplex (LATM) manages the sequences of audio data with
        relatively small overhead.  In audio-only applications, then, it is
        desirable for LATM-based MPEG-4 Audio bitstreams to be directly mapped
        onto the RTP packets without using MPEG-4 Systems.
      </t>
      <t>
        While LATM has several multiplexing features as follows:
        <list hangIndent="0"
              style="symbols">
          <t>Carrying configuration information with audio data,</t>
          <t>Concatenation of multiple audio frames in one audio stream,</t>
          <t>Multiplexing multiple objects (programs),</t>
          <t>Multiplexing scalable layers,</t>
        </list>
        in RTP transmission there is no need for the last two features.
        Therefore, these two features MUST NOT be used in applications based on
        RTP packetization specified by this document.  Since LATM has been
        developed for only natural audio coding tools, i.e., not for synthesis
        tools, it seems difficult to transmit Structured Audio (SA) data and Text
        to Speech Interface (TTSI) data by LATM.  Therefore, SA data and TTSI data
        MUST NOT be transported by the RTP packetization in this document.
      </t>
      <t>
        For transmission of scalable streams, audio data of each layer SHOULD be
        packetized onto different RTP packets allowing for the different layers
        to be treated differently at the IP level, for example via some means of
        differentiated service.  On the other hand, all configuration data of the
        scalable streams are contained in one LATM configuration data
        "StreamMuxConfig" and every scalable layer shares the StreamMuxConfig.
        The mapping between each layer and its configuration data is achieved by
        LATM header information attached to the audio data.  In order to indicate
        the dependency information of the scalable streams, a restriction is
        applied to the dynamic assignment rule of payload type (PT) values (see
        section 4.2).
      </t>
      <t>
        For MPEG-4 Audio coding tools, as is true for other audio coders, if the
        payload is a single audio frame, packet loss will not impair the
        decodability of adjacent packets.  Therefore, the additional media
        specific header for recovering errors will not be required for MPEG-4
        Audio.  Existing RTP protection mechanisms, such as Generic Forward Error
        Correction (RFC 2733) and Redundant Audio Data (RFC 2198), MAY be applied
        to improve error resiliency.
      </t>
    </section>
    </section>

    <section title="Conventions"
             toc="default">
      <t>
        The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
        "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
        document are to be interpreted as described in <xref target="RFC2119" />.
      </t>
    </section>

    <section title="RTP Packetization of MPEG-4 Visual bitstream"
             toc="default">
      <t>
        This section specifies RTP packetization rules for MPEG-4 Visual
        content.  An MPEG-4 Visual bitstream is mapped directly onto RTP
        packets without the addition of extra header fields or any removal of
        Visual syntax elements.  The Combined Configuration/Elementary stream
        mode MUST be used so that configuration information will be carried to
        the same RTP port as the elementary stream.  (see 6.2.1 "Start codes"
        of ISO/IEC 14496-2 <xref target="14496-2"></xref> <xref target="14496-2/Cor.1"></xref>
        <xref target="14496-2/Amd.1"></xref>) The configuration information MAY
        additionally be specified by some out-of-band means.  If needed for an
        H.323 terminal, H.245 codepoint "decoderConfigurationInformation" MUST
        be used for this purpose.  If needed by systems using MIME content
        type and SDP parameters, e.g., SIP and RTSP, the optional parameter
        "config" MUST be used to specify the configuration information (see
        5.1 and 5.2).
      </t>
      <t>
        When the short video header mode is used, the RTP payload format for
        H.263 SHOULD be used (the format defined in RFC 4629 is RECOMMENDED, but
        the RFC 4628 format MAY be used for compatibility with older
        implementations).
      </t>
      <t>
        <figure>
          <artwork>
0                   1                   2                   3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X|  CC   |M|     PT      |       sequence number         | RTP
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|                           timestamp                           | Header
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|           synchronization source (SSRC) identifier            |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
|            contributing source (CSRC) identifiers             |
|                             ....                              |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
|                                                               | RTP
|       MPEG-4 Visual stream (byte aligned)                     | Pay-
|                                                               | load
|                               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|                               :...OPTIONAL RTP padding        |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

     Figure 1 - An RTP packet for MPEG-4 Visual stream
          </artwork>
        </figure>
      </t>
      <section title="Use of RTP header fields for MPEG-4 Visual"
               toc="default">
        <t>
          Payload Type (PT): The assignment of an RTP payload type for this new
          packet format is outside the scope of this document, and will not be
          specified here.  It is expected that the RTP profile for a particular
          class of applications will assign a payload type for this encoding, or if
          that is not done then a payload type in the dynamic range SHALL be chosen
          by means of an out of band signaling protocol (e.g., H.245, SIP, etc).
        </t>
        <t>
          Extension (X) bit: Defined by the RTP profile used.
        </t>
        <t>
          Sequence Number: Incremented by one for each RTP data packet sent,
          starting, for security reasons, with a random initial value.
        </t>
        <t>
          Marker (M) bit: The marker bit is set to one to indicate the last RTP
          packet (or only RTP packet) of a VOP.  When multiple VOPs are carried in
          the same RTP packet, the marker bit is set to one.
        </t>
        <t>
          Timestamp: The timestamp indicates the sampling instance of the VOP
          contained in the RTP packet.  A constant offset, which is random, is added
          for security reasons.
        <list hangIndent="1"
              style="symbols">
          <t>When multiple VOPs are carried in the same RTP packet, the timestamp
             indicates the earliest of the VOP times within the VOPs carried in the
             RTP packet.  Timestamp information of the rest of the VOPs are derived
             from the timestamp fields in the VOP header (modulo_time_base and
             vop_time_increment).</t>
          <t>If the RTP packet contains only configuration information and/or
             Group_of_VideoObjectPlane() fields, the timestamp of the next VOP in
             the coding order is used.</t>
          <t>If the RTP packet contains only visual_object_sequence_end_code
             information, the timestamp of the immediately preceding VOP in the
             coding order is used.</t>
        </list>
        </t>
        <t>
          The resolution of the timestamp is set to its default value of 90kHz,
          unless specified by an out-of-band means (e.g., SDP parameter or MIME
          parameter as defined in section 5).
        </t>
        <t>
          Other header fields are used as described in RFC 3550 <xref target="RFC3550"></xref>.
        </t>
      </section>
      <section title="Fragmentation of MPEG-4 Visual bitstream"
               toc="default">
        <t>
          A fragmented MPEG-4 Visual bitstream is mapped directly onto the RTP
          payload without any addition of extra header fields or any removal of
          Visual syntax elements.  The Combined Configuration/Elementary streams
          mode is used.  The following rules apply for the fragmentation.
        </t>
        <t>
          In the following, header means one of the following:
        <list hangIndent="1"
              style="symbols">
          <t>Configuration information (Visual Object Sequence Header, Visual Object
             Header and Video Object Layer Header)</t>
          <t>visual_object_sequence_end_code</t>
          <t>The header of the entry point function for an elementary stream
             (Group_of_VideoObjectPlane() or the header of VideoObjectPlane(),
             video_plane_with_short_header(), MeshObject() or FaceObject())</t>
          <t>The video packet header (video_packet_header() excluding
             next_resync_marker())</t>
          <t>The header of gob_layer()</t>
          <t>See 6.2.1 "Start codes" of ISO/IEC 14496-2 <xref target="14496-2"></xref>
             <xref target="14496-2/Cor.1"></xref> <xref target="14496-2/Amd.1"></xref> for the
             definition of the configuration information and the entry point functions.</t>
        </list>
        </t>
        <t>
          (1) Configuration information and Group_of_VideoObjectPlane() fields
          SHALL be placed at the beginning of the RTP payload (just after the RTP
          header) or just after the header of the syntactically upper layer
          function.
        </t>
        <t>
          (2) If one or more headers exist in the RTP payload, the RTP payload
          SHALL begin with the header of the syntactically highest function.
          Note: The visual_object_sequence_end_code is regarded as the lowest
          function.
        </t>
        <t>
          (3) A header SHALL NOT be split into a plurality of RTP packets.
        </t>
        <t>
          (4) Different VOPs SHOULD be fragmented into different RTP packets so
          that one RTP packet consists of the data bytes associated with a unique
          VOP time instance (that is indicated in the timestamp field in the RTP
          packet header), with the exception that multiple consecutive VOPs MAY be
          carried within one RTP packet in the decoding order if the size of the
          VOPs is small.
        </t>
        <t>
          Note: When multiple VOPs are carried in one RTP payload, the timestamp of
          the VOPs after the first one may be calculated by the decoder.  This
          operation is necessary only for RTP packets in which the marker bit
          equals one and the beginning of the RTP payload corresponds to a start
          code. (See timestamp and marker bit in section 3.1.)
        </t>
        <t>
          (5) It is RECOMMENDED that a single video packet is sent as a single RTP
          packet.  The size of a video packet SHOULD be adjusted in such a way that
          the resulting RTP packet is not larger than the path-MTU.
          Note: Rule (5) does not apply when the video packet is disabled by the
          coder configuration (by setting resync_marker_disable in the VOL header
          to 1), or in coding tools where the video packet is not supported.  In
          this case, a VOP MAY be split at arbitrary byte-positions.
        </t>
        <t>
          The video packet starts with the VOP header or the video packet header,
          followed by motion_shape_texture(), and ends with next_resync_marker() or
          next_start_code().
        </t>
      </section>
      <section title="Examples of packetized MPEG-4 Visual bitstream"
               toc="default">
        <t>
          Figure 2 shows examples of RTP packets generated based on the criteria
          described in 3.2.
        </t>
        <t>
          (a) is an example of the first RTP packet or the random access point of
          an MPEG-4 Visual bitstream containing the configuration information.
          According to criterion (1), the Visual Object Sequence Header(VS header)
          is placed at the beginning of the RTP payload, preceding the Visual
          Object Header and the Video Object Layer Header(VO header, VOL header).
          Since the fragmentation rule defined in 3.2 guarantees that the
          configuration information, starting with
          visual_object_sequence_start_code, is always placed at the beginning of
          the RTP payload, RTP receivers can detect the random access point by
          checking if the first 32-bit field of the RTP payload is
          visual_object_sequence_start_code.
        </t>
        <t>
          (b) is another example of the RTP packet containing the configuration
          information.  It differs from example (a) in that the RTP packet also
          contains a video packet in the VOP following the configuration
          information.  Since the length of the configuration information is
          relatively short (typically scores of bytes) and an RTP packet containing
          only the configuration information may thus increase the overhead, the
          configuration information and the immediately following GOV and/or (a
          part of) VOP can be packetized into a single RTP packet as in this
          example.
        </t>
        <t>
          (c) is an example of an RTP packet that contains
          Group_of_VideoObjectPlane(GOV).  Following criterion (1), the GOV is
          placed at the beginning of the RTP payload.  It would be a waste of RTP/IP
          header overhead to generate an RTP packet containing only a GOV whose
          length is 7 bytes.  Therefore, (a part of) the following VOP can be placed
          in the same RTP packet as shown in (c).
        </t>
        <t>
          (d) is an example of the case where one video packet is packetized into
          one RTP packet.  When the packet-loss rate of the underlying network is
          high, this kind of packetization is recommended.  Even when the RTP packet
          containing the VOP header is discarded by a packet loss, the other RTP
          packets can be decoded by using the HEC(Header Extension Code)
          information in the video packet header.  No extra RTP header field is
          necessary.
        </t>
        <t>
          (e) is an example of the case where more than one video packet is
          packetized into one RTP packet.  This kind of packetization is effective
          to save the overhead of RTP/IP headers when the bit-rate of the
          underlying network is low.  However, it will decrease the packet-loss
          resiliency because multiple video packets are discarded by a single RTP
          packet loss.  The optimal number of video packets in an RTP packet and the
          length of the RTP packet can be determined considering the packet-loss
          rate and the bit-rate of the underlying network.
        </t>
        <t>
          (f) is an example of the case when the video packet is disabled by
          setting resync_marker_disable in the VOL header to 1.  In this case, a VOP
          may be split into a plurality of RTP packets at arbitrary byte-positions.
          For example, it is possible to split a VOP into fixed-length packets.
          This kind of coder configuration and RTP packet fragmentation may be used
          when the underlying network is guaranteed to be error-free.  On the other
          hand, it is not recommended to use it in error-prone environment since it
          provides only poor packet loss resiliency.
        </t>
        <t>
          Figure 3 shows examples of RTP packets prohibited by the criteria of 3.2.
        </t>
        <t>
          Fragmentation of a header into multiple RTP packets, as in (a), will not
          only increase the overhead of RTP/IP headers but also decrease the error
          resiliency.  Therefore, it is prohibited by the criterion (3).
        </t>
        <t>
          When concatenating more than one video packet into an RTP packet, VOP
          header or video_packet_header() shall not be placed in the middle of the
          RTP payload.  The packetization as in (b) is not allowed by criterion (2)
          due to the aspect of the error resiliency.  Comparing this example with
          Figure 2(d), although two video packets are mapped onto two RTP packets
          in both cases, the packet-loss resiliency is not identical.  Namely, if
          the second RTP packet is lost, both video packets 1 and 2 are lost in the
          case of Figure 3(b) whereas only video packet 2 is lost in the case of
          Figure 2(d).
        </t>
        <t>
        <figure>
          <artwork>
    +------+------+------+------+
(a) | RTP  |  VS  |  VO  | VOL  |
    |header|header|header|header|
    +------+------+------+------+

    +------+------+------+------+------+------------+
(b) | RTP  |  VS  |  VO  | VOL  | VOP  |Video Packet|
    |header|header|header|header|header|            |
    +------+------+------+------+------+------------+

    +------+-----+------------------+
(c) | RTP  | GOV |Video Object Plane|
    |header|     |                  |
    +------+-----+------------------+

    +------+------+------------+  +------+------+------------+
(d) | RTP  | VOP  |Video Packet|  | RTP  |  VP  |Video Packet|
    |header|header|    (1)     |  |header|header|    (2)     |
    +------+------+------------+  +------+------+------------+

    +------+------+------------+------+------------+------+------------+
(e) | RTP  |  VP  |Video Packet|  VP  |Video Packet|  VP  |Video Packet|
    |header|header|     (1)    |header|    (2)     |header|    (3)     |
    +------+------+------------+------+------------+------+------------+

    +------+------+------------+  +------+------------+
(f) | RTP  | VOP  |VOP fragment|  | RTP  |VOP fragment|
    |header|header|    (1)     |  |header|    (2)     |
    +------+------+------------+  +------+------------+

     Figure 2 - Examples of RTP packetized MPEG-4 Visual bitstream
          </artwork>
        </figure>
        </t>
        <t>
        <figure>
          <artwork>
    +------+-------------+  +------+------------+------------+
(a) | RTP  |First half of|  | RTP  |Last half of|Video Packet|
    |header|  VP header  |  |header|  VP header |            |
    +------+-------------+  +------+------------+------------+

    +------+------+----------+  +------+---------+------+------------+
(b) | RTP  | VOP  |First half|  | RTP  |Last half|  VP  |Video Packet|
    |header|header| of VP(1) |  |header| of VP(1)|header|    (2)     |
    +------+------+----------+  +------+---------+------+------------+

   Figure 3 - Examples of prohibited RTP packetization for MPEG-4 Visual
   bitstream
          </artwork>
        </figure>
        </t>
      </section>
    </section>
    <section title="RTP Packetization of MPEG-4 Audio bitstream" anchor="RTP Packetization of MPEG-4 Audio bitstreams">
      <t>
        This section specifies RTP packetization rules for MPEG-4 Audio
        bitstreams.  MPEG-4 Audio streams MUST be formatted by LATM (Low-overhead
        MPEG-4 Audio Transport Multiplex) tool <xref target="14496-3"></xref>,
        and the LATM-based streams are then mapped onto RTP packets as
        described in the three sections below.
      </t>
      <section title="RTP Packet Format" anchor="RTP Packet Format"
               toc="default">
        <t>
          LATM-based streams consist of a sequence of audioMuxElements that
          include one or more PayloadMux Elements which carry the audio frames.
          A complete audioMuxElement or a part of one SHALL be mapped directly
          onto an RTP payload without any removal of audioMuxElement syntax
          elements (see Figure 4).  The first byte of each audioMuxElement
          SHALL be located at the first payload location in an RTP packet.
        </t>
        <figure>
          <artwork>
0                   1                   2                   3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X|  CC   |M|     PT      |       sequence number         |RTP
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|                           timestamp                           |Header
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|           synchronization source (SSRC) identifier            |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
|            contributing source (CSRC) identifiers             |
|                             ....                              |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
|                                                               |RTP
:                 audioMuxElement (byte aligned)                :Payload
|                                                               |
|                               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|                               :...OPTIONAL RTP padding        |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

             Figure 4 - An RTP packet for MPEG-4 Audio
          </artwork>
        </figure>
        <t>
          In order to decode the audioMuxElement, the following muxConfigPresent
          information is required to be indicated by an out-of-band means.  When SDP
          is utilized for this indication, MIME parameter "cpresent" corresponds to
          the muxConfigPresent information (see section 5.3). The following restrictions apply:
        <list hangIndent="1"
              style="symbols">
          <t>In the out-of-band signalling case the number of PayloadMux
             Elements contained in each audioMuxElement can only be set once.
             If values greater than one PayloadMux Element are used, special care
             is required to ensure that the last RTP packet remains decodable.</t>
          <t>In the in-band signalling case the audio frames are in general
             not byte aligned. Hinting RTP payload from MP4 file format
             <xref target="14496-12"></xref> <xref target="14496-14"></xref>
             is therefore not possible.</t>
        </list>
        </t>
        <t>
          muxConfigPresent: If this value is set to 1 (in-band mode), the
          audioMuxElement SHALL include an indication bit "useSameStreamMux" and
          MAY include the configuration information for audio compression
          "StreamMuxConfig".  The useSameStreamMux bit indicates whether the
          StreamMuxConfig element in the previous frame is applied in the current
          frame.  If the useSameStreamMux bit indicates to use the StreamMuxConfig
          from the previous frame, but if the previous frame has been lost, the
          current frame may not be decodable.  Therefore, in case of in-band mode,
          the StreamMuxConfig element SHOULD be transmitted repeatedly depending on
          the network condition. On the other hand, if muxConfigPresent is set to 0
          (out-of-band mode), the StreamMuxConfig element is required to be
          transmitted by an out-of-band means.  In case of SDP, MIME parameter
          "config" is utilized (see section 5.3).
        </t>
      </section>
      <section title="Use of RTP Header Fields for MPEG-4 Audio"
               toc="default">
        <t>
          Payload Type (PT): The assignment of an RTP payload type for this new
          packet format is outside the scope of this document, and will not be
          specified here.  It is expected that the RTP profile for a particular
          class of applications will assign a payload type for this encoding, or if
          that is not done then a payload type in the dynamic range shall be chosen
          by means of an out of band signaling protocol (e.g., H.245, SIP, etc).  In
          the dynamic assignment of RTP payload types for scalable streams, a
          different value SHOULD be assigned to each layer.  The assigned values
          SHOULD be in order of enhancement layer dependency, where the base layer has
          the smallest value.
        </t>
        <t>
          Marker (M) bit: The marker bit indicates audioMuxElement boundaries.  It
          is set to one to indicate that the RTP packet contains a complete
          audioMuxElement or the last fragment of an audioMuxElement.
        </t>
        <t>
          Timestamp: The timestamp indicates the sampling instance of the first
          audio frame contained in the RTP packet.  Timestamps are recommended to
          start at a random value for security reasons.
        </t>
        <t>
          Unless specified by an out-of-band means, the resolution of the timestamp
          is set to its default value of 90 kHz.
        </t>
        <t>
          Sequence Number: Incremented by one for each RTP packet sent, starting,
          for security reasons, with a random value.
        </t>
        <t>
          Other header fields are used as described in RFC 3550 <xref target="RFC3550"></xref>.
        </t>
      </section>
      <section title="Fragmentation of MPEG-4 Audio bitstream"
               toc="default">
        <t>
          It is RECOMMENDED to put one audioMuxElement in each RTP packet.  If the
          size of an audioMuxElement can be kept small enough that the size of the
          RTP packet containing it does not exceed the size of the path-MTU, this
          will be no problem.  If it cannot, the audioMuxElement MAY be fragmented
          and spread across multiple packets.
        </t>
      </section>
    </section>
    <section title="MIME type registration for MPEG-4 Audio/Visual streams"
             toc="default">
      <t>
        The following sections describe the MIME type registrations for MPEG-4
        Audio/Visual streams.  MIME type registration and SDP usage for the MPEG-4
        Visual stream are described in Sections 5.1 and 5.2, respectively, while
        MIME type registration and SDP usage for MPEG-4 Audio stream are
        described in Sections 5.3 and 5.4, respectively.
      </t>
      <section title="MIME type registration for MPEG-4 Visual"
               toc="default">
        <t>
          MIME media type name: video
        </t>
        <t>
          MIME subtype name: MP4V-ES
        </t>
        <t>
          Required parameters: none
        </t>
        <t>
          Optional parameters:
        <list hangIndent="1"
              style="empty">
          <t>
            rate: This parameter is used only for RTP transport.  It indicates the
            resolution of the timestamp field in the RTP header.  If this parameter
            is not specified, its default value of 90000 (90kHz) is used.
          </t>
          <t>
            profile-level-id: A decimal representation of MPEG-4 Visual Profile
            and Level indication value (profile_and_level_indication) defined in
            Table G-1 of ISO/IEC 14496-2 <xref target="14496-2"></xref>
            <xref target="14496-2/Amd.1"></xref>.  This parameter MAY be used in
            the capability exchange or session setup procedure to indicate MPEG-4
            Visual Profile and Level combination of which the MPEG-4 Visual codec
            is capable.  If this parameter is not specified by the procedure, its
            default value of 1 (Simple Profile/Level 1) is used. 
          </t>
          <t>
            config: This parameter SHALL be used to indicate the configuration of
            the corresponding MPEG-4 Visual bitstream.  It SHALL NOT be used to
            indicate the codec capability in the capability exchange procedure.  It
            is a hexadecimal representation of an octet string that expresses the
            MPEG-4 Visual configuration information, as defined in subclause 6.2.1
            Start codes of ISO/IEC14496-2 <xref target="14496-2"></xref>
            <xref target="14496-2/Amd.1"></xref> <xref target="14496-2/Cor.1"></xref>.
            The configuration information
            is mapped onto the octet string in an MSB-first basis.  The first bit
            of the configuration information SHALL be located at the MSB of the
            first octet.  The configuration information indicated by this parameter
            SHALL be the same as the configuration information in the
            corresponding MPEG-4 Visual stream, except for
            first_half_vbv_occupancy and latter_half_vbv_occupancy, if exist,
            which may vary in the repeated configuration information inside an
            MPEG-4 Visual stream (See 6.2.1 Start codes of ISO/IEC14496-2).
          </t>
          <t>
            Example usages for these parameters are:
          <list hangIndent="1"
                style="symbols">
            <t>
              MPEG-4 Visual Simple Profile/Level 1:
              Content-type: video/mp4v-es; profile-level-id=1
            </t>
            <t>
              MPEG-4 Visual Core Profile/Level 2:
              Content-type: video/mp4v-es; profile-level-id=34
            </t>
            <t>
              MPEG-4 Visual Advanced Real Time Simple Profile/Level 1:
              Content-type: video/mp4v-es; profile-level-id=145
            </t>
          </list>
          </t>
        </list>
        </t>
        <t>
          Published specification:
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              The specifications for MPEG-4 Visual streams are presented in ISO/IEC
              14496-2 <xref target="14496-2"></xref> <xref target="14496-2/Amd.1"></xref>
              <xref target="14496-2/Cor.1"></xref>. The RTP payload format is described in RFC 3016.
            </t>
          </list>
        </t>
        <t>
          Encoding considerations:
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              Video bitstreams MUST be generated according to MPEG-4 Visual
              specifications (ISO/IEC 14496-2).  A video bitstream is binary data and
              MUST be encoded for non-binary transport (for Email, the Base64
              encoding is sufficient).  This type is also defined for transfer via
              RTP.  The RTP packets MUST be packetized according to the MPEG-4 Visual
              RTP payload format defined in RFC 3016.
            </t>
          </list>
        </t>
        <t>
          Security considerations:
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              See section 6 of RFC 3016.
            </t>
          </list>
        </t>
        <t>
          Interoperability considerations:
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              MPEG-4 Visual provides a large and rich set of tools for the coding of
              visual objects.  For effective implementation of the standard, subsets
              of the MPEG-4 Visual tool sets have been provided for use in specific
              applications.  These subsets, called 'Profiles', limit the size of the
              tool set a decoder is required to implement.  In order to restrict
              computational complexity, one or more Levels are set for each Profile.
              A Profile@Level combination allows:
            </t>
            <t>
              o a codec builder to implement only the subset of the standard he
              needs, while maintaining interworking with other MPEG-4 devices
              included in the same combination, and
            </t>
            <t>
              o checking whether MPEG-4 devices comply with the standard
              ('conformance testing').
            </t>
          </list>
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              The visual stream SHALL be compliant with the MPEG-4 Visual
              Profile@Level specified by the parameter "profile-level-id".
              Interoperability between a sender and a receiver may be achieved by
              specifying the parameter "profile-level-id" in MIME content, or by
              arranging in the capability exchange/announcement procedure to set this
              parameter mutually to the same value.
            </t>
          </list>
        </t>
        <t>
          Applications which use this media type:
        </t>
        <t>
          <list hangIndent="1"
                style="empty">
            <t>
              Audio and visual streaming and conferencing tools, Internet messaging
              and Email applications.
            </t>
          </list>
        </t>
        <t>
          Additional information: none
        </t>
        <t>
          Person &amp; email address to contact for further information:
          <list hangIndent="1"
                style="empty">
            <t>
              The authors of RFC 3016.  (See section 8.)
            </t>
          </list>
        </t>
        <t>
          Intended usage: COMMON
        </t>
        <t>
          Author/Change controller:
          <list hangIndent="1"
                style="empty">
            <t>
              The authors of RFC 3016.  (See section 8.)
            </t>
          </list>
        </t>
      </section>
      <section title="SDP usage of MPEG-4 Visual"
               toc="default">
        <t>
          The MIME media type video/MP4V-ES string is mapped to fields in the
          Session Description Protocol (SDP), RFC 4566, as follows:
          <list hangIndent="1"
                style="symbols">
            <t>
              The MIME type (video) goes in SDP "m=" as the media name.
            </t>
            <t>
              The MIME subtype (MP4V-ES) goes in SDP "a=rtpmap" as the encoding name.
            </t>
            <t>
              The optional parameter "rate" goes in "a=rtpmap" as the clock rate.
            </t>
            <t>
              The optional parameter "profile-level-id" and "config" go in the
              "a=fmtp" line to indicate the coder capability and configuration,
              respectively.  These parameters are expressed as a MIME media type string,
              in the form of a semicolon-separated list of parameter=value pairs.
            </t>
          </list>
        </t>
        <t>
          The following are some examples of media representation in SDP:
        </t>
        <t>
          <figure>
            <artwork>
Simple Profile/Level 1, rate=90000(90kHz), "profile-level-id" and
"config" are present in "a=fmtp" line:
  m=video 49170/2 RTP/AVP 98
  a=rtpmap:98 MP4V-ES/90000
  a=fmtp:98 profile-level-id=1;config=000001B001000001B50900000100000001
     20008440FA282C2090A21F

Core Profile/Level 2, rate=90000(90kHz), "profile-level-id" is present
in "a=fmtp" line:
  m=video 49170/2 RTP/AVP 98
  a=rtpmap:98 MP4V-ES/90000
  a=fmtp:98 profile-level-id=34

Advanced Real Time Simple Profile/Level 1, rate=90000(90kHz),
"profile-level-id" is present in "a=fmtp" line:
  m=video 49170/2 RTP/AVP 98
  a=rtpmap:98 MP4V-ES/90000
  a=fmtp:98 profile-level-id=145
            </artwork>
          </figure>
        </t>
      </section>
      <section title="MIME type registration of MPEG-4 Audio"
               toc="default">
        <t>
          MIME media type name: audio
        </t>
        <t>
          MIME subtype name: MP4A-LATM
        </t>
        <t>
          Required parameters:
          <list hangIndent="1"
                style="empty">
            <t>
              rate: the rate parameter indicates the RTP time stamp clock rate.  The
              default value is 90000.  Other rates MAY be specified only if they are
              set to the same value as the audio sampling rate (number of samples
              per second). 
            </t>
            <t>
              In the presence of SBR (Spectral Band Replication) the sampling rates
              for the core en-/decoder and the SBR tool differ in most cases. This
              parameter shall therefore not be considered as the definitive sampling
              rate. If this parameter is used the following recommendations apply to servers:
              <list hangIndent="1"
                    style="symbols">
                <t>
                  When the presence of SBR is not explicitly signalled by the optional
                  SDP parameters: object parameter, profile-level-id or config string, this
                  parameter shall be set to the core codec sampling rate.
                </t>
                <t>
                  When the presence of SBR is explicitly signalled by the optional SDP
                  parameters: object parameter, profile-level-id or config string this
                  parameter shall be set to the SBR sampling rate.
                </t>
              </list>
            </t>
          </list>
        </t>
        <t>
          Optional parameters:
          <list hangIndent="1"
                style="empty">
            <t>
              profile-level-id: a decimal representation of MPEG-4 Audio Profile Level
              indication value defined in <xref target="14496-3">ISO/IEC 14496-3</xref>.
              This parameter indicates
              which MPEG-4 Audio tool subsets the decoder is capable of using. If this
              parameter is not specified in the capability exchange or session setup
              procedure, its default value of 30 (Natural Audio Profile/Level 1) is used.
            </t>
            <t>
              MPS-profile-level-id: a decimal representation of the MPEG Surround Profile
              Level indication as defined in <xref target="14496-3">ISO/IEC 14496-3</xref>.
              This parameter indicates
              the MPEG Surround profile and level that the decoder must be capable in
              order to decode the stream.
            </t>
            <t>
              object: a decimal representation of the MPEG-4 Audio Object Type value
              defined in <xref target="14496-3">ISO/IEC 14496-3</xref>. This parameter
              specifies the tool to be used by the coder. It CAN be used to limit the
              capability within the specified "profile-level-id".
            </t>
            <t>
              bitrate: the data rate for the audio bit stream.
            </t>
            <t>
              cpresent: a boolean parameter that indicates whether audio payload configuration
              data has been multiplexed into an RTP payload (see section 4.1). A 0
              indicates the configuration data has not been multiplexed into an RTP
              payload, a 1 indicates that it has. The default if the parameter is omitted is 1.
            </t>
            <t>
              config: a hexadecimal representation of an octet string that expresses the
              audio payload configuration data "StreamMuxConfig", as defined in
              <xref target="14496-3">ISO/IEC 14496-3</xref>. Configuration data is
              mapped onto the octet string in an MSB-first basis. The first bit of the
              configuration data SHALL be located at the MSB of the first octet. In the
              last octet, zero-padding bits, if necessary, SHALL follow the configuration data.
            </t>
            <t>
              MPS-asc: a hexadecimal representation of an octet string that expresses
              audio payload configuration data "AudioSpecificConfig", as defined in
              <xref target="14496-3">ISO/IEC 14496-3</xref>. If this parameter is not
              present the relevant signalling
              is performed by other means (e.g. in-band or contained in the config string).
            </t>
            <t>
              The same mapping rules as for the config parameter apply.
            </t>
            <t>
              ptime: RECOMMENDED duration of each packet in milliseconds.
            </t>
            <t>
              SBR-enabled: a boolean parameter which indicates whether SBR-data can
              be expected in the RTP-payload of a stream. This parameter is relevant
              for an SBR-capable decoder if the presence of SBR can not be detected
              from an out-of-band decoder configuration (e.g. contained in the config string).
            </t>
            <t>
              If this parameter is set to 0, a decoder SHALL expect that SBR is not
              used. If this parameter is set to 1, a decoder SHOULD upsample the
              audio data with the SBR tool, regardless whether SBR data is present
              in the stream or not.
            </t>
            <t>
              If the presence of SBR can not be detected from out-of-band configuration
              and the SBR-enabled parameter is not present, the parameter defaults to 1
              for an SBR-capable decoder. If the resulting output sampling rate or the
              computational complexity is not supported, the SBR tool may be disabled
              or run in downsampled mode.
            </t>
          </list>
        </t>
        <t>
          Published specification:
          <list hangIndent="1"
                style="empty">
            <t>
              Payload format specifications are described in this document.  Encoding
              specifications are provided in <xref target="14496-3">ISO/IEC 14496-3</xref>.
            </t>
          </list>
        </t>
        <t>
          Encoding considerations:
          <list hangIndent="1"
                style="empty">
            <t>
              This type is only defined for transfer via RTP.
            </t>
          </list>
        </t>
        <t>
          Security considerations:
          <list hangIndent="1"
                style="empty">
            <t>
              See Section 6 of RFC 3016.
            </t>
          </list>
        </t>
        <t>
          Interoperability considerations:
          <list hangIndent="1"
                style="empty">
            <t>
              MPEG-4 Audio provides a large and rich set of tools for the coding of
              audio objects.  For effective implementation of the standard, subsets of
              the MPEG-4 Audio tool sets similar to those used in MPEG-4 Visual have
              been provided (see section 5.1).
            </t>
            <t>
              The audio stream SHALL be compliant with the MPEG-4 Audio Profile@Level
              specified by the parameters "profile-level-id" and "MPS-profile-level-id".
              Interoperability between a sender and a receiver may be achieved by
              specifying the parameters "profile-level-id" and "MPS-profile-level-id"
              in MIME content, or by arranging in the capability exchange procedure to
              set this parameter mutually to the same value.  Furthermore, the "object"
              parameter can be used to limit the capability within the specified
              Profile@Level in capability exchange.
            </t>
          </list>
        </t>
        <t>
          Applications which use this media type:
          <list hangIndent="1"
                style="empty">
            <t>
              Audio and video streaming and conferencing tools.
            </t>
          </list>
        </t>
        <t>
          Additional information: none
        </t>
        <t>
          Person &amp; email address to contact for further information:
          <list hangIndent="1"
                style="empty">
            <t>
              See Section 8 of RFC 3016.
            </t>
          </list>
        </t>
        <t>
          Intended usage: COMMON
        </t>
        <t>
          Author/Change controller:
          <list hangIndent="1"
                style="empty">
            <t>
              See Section 8 of RFC 3016.
            </t>
          </list>
        </t>
      </section>
      <section title="SDP usage of MPEG-4 Audio"
               toc="default">
        <t>
          The MIME media type audio/MP4A-LATM string is mapped to fields in the
          Session Description Protocol (SDP), RFC 4566, as follows:
          <list hangIndent="1"
                style="symbols">
            <t>
              The MIME type (audio) goes in SDP "m=" as the media name.
            </t>
            <t>
              The MIME subtype (MP4A-LATM) goes in SDP "a=rtpmap" as the encoding name.
            </t>
            <t>
              The required parameter "rate" goes in "a=rtpmap" as the clock rate.
            </t>
            <t>
              The optional parameter "ptime" goes in SDP "a=ptime" attribute.
            </t>
            <t>
              The optional parameters "profile-level-id" and "MPS-profile-level-id"
              go in the "a=fmtp" line to indicate the coder capability. The "object"
              parameter goes in the "a=fmtp" attribute. The payload-format-specific
              parameters "bitrate", "cpresent", "config", "MPS-asc"  and "SBR-enabled"
              go in the "a=fmtp" line.  These parameters are expressed as a MIME media
              type string, in the form of a semicolon-separated list of parameter=value pairs.
            </t>
          </list>
        </t>
        <t>
          The following are some examples of the media representation in SDP:
        </t>
        <t>
          For 6 kb/s CELP bitstreams (with an audio sampling rate of 8 kHz),
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/8000
  a=fmtp:96 profile-level-id=9;object=8;cpresent=0;
  config=40008B18388380
  a=ptime:20
            </artwork>
          </figure>
        </t>
        <t>
          For 64 kb/s AAC LC stereo bitstreams (with an audio sampling rate of 24 kHz),
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/24000
  a=fmtp:96 profile-level-id=1; bitrate=64000; cpresent=0;
  config=400026203fc0
            </artwork>
          </figure>
        </t>
        <t>
          In the above two examples, audio configuration data is not multiplexed
          into the RTP payload and is described only in SDP.  Furthermore, the
          "clock rate" is set to the audio sampling rate.
        </t>
        <t>
          If the clock rate has been set to its default value and it is necessary
          to obtain the audio sampling rate, this can be done by parsing the
          "config" parameter (see the following example).
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/90000
  a=fmtp:96 object=8; cpresent=0; config=40008B18388380
            </artwork>
          </figure>
        </t>
        <t>
          In the examples above the presence of SBR can not be determined by the SDP
          parameter set. If SBR is not present in the payload, the rate parameter
          and/or the StreamMuxConfig contains the audio sampling rate. If SBR is
          present in the payload the rate parameter and/or the StreamMuxConfig
          contains the core codec sampling rate. The StreamMuxConfig shall be
          considered definitive in both cases. 
        </t>
        <t>
          In this case the presence of SBR can not be detected in advance. An SBR
          enabled decoder SHOULD use the SBR tool to upsample the audio data if
          complexity and resulting output sampling rate permits.
        </t>
        <t>
          In the following examples the presence of SBR is not signalled by the
          SDP parameters object, profile-level-id and config string but the
          SBR-enabled parameter is present. The rate parameter and the
          StreamMuxConfig contain the core codec sampling rate. The StreamMuxConfig
          shall be considered definitive. Receivers supporting SBR should set the
          output sampling rate to either the core AAC sampling rate as indicated
          in the StreamMuxConfig (when "SBR-enabled" is set to 0) or twice the
          indicated rate (when "SBR-enabled" is set to 1).
        </t>
        <t>
          Example with "SBR-enabled=0", sampling rate 24khz:
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/24000
  a=fmtp:96 profile-level-id=1; bitrate=64000; cpresent=0;
  SBR-enabled=0; config=400026203fc0
            </artwork>
          </figure>
        </t>
        <t>
          Example with "SBR-enabled=1", receivers supporting SBR should set the sampling rate to 48khz:
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/24000
  a=fmtp:96 profile-level-id=1; bitrate=64000; cpresent=0;
  SBR-enabled=1; config=400026203fc0
            </artwork>
          </figure>
        </t>
        <t>
          When the presence of SBR is explicitly signalled by the SDP parameters
          object, profile-level-id or the config string as in the example below,
          the StreamMuxConfig contains both the core codec sampling rate and the
          SBR sampling rate. The appropriate output sampling rate may be chosen
          dependent on the support of SBR.
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/48000
  a=fmtp:96 profile-level-id=44; bitrate=64000; cpresent=0;
  config=40005623101fe0
            </artwork>
          </figure>
        </t>
        <t>
          The following example shows that the audio configuration data appears in the RTP payload.
          <figure>
            <artwork>
   m=audio 49230 RTP/AVP 96
   a=rtpmap:96 MP4A-LATM/90000
   a=fmtp:96 object=2; cpresent=1
            </artwork>
          </figure>
        </t>
        <t>
          The following examples show how MPEG Surround configuration data can be
          signalled using SDP. The configuration is carried within the config string
          in the first example by using two different layers. The general parameters
          in this example are: AudioMuxVersion=1; allStreamsSameTimeFraming=1;
          numSubFrames=0; numProgram=0; numLayer=1. The first layer describes the
          HE-AAC payload and signals the following parameters: ascLen=25;
          audioObjectType=2 (AAC LC); extensionAudioObjectType=5 (SBR);
          samplingFrequencyIndex=6 (24kHz); extensionSamplingFrequencyIndex=3 (48kHz);
          channelConfiguration=2 (2.0 channels). The second layer describes the MPEG
          surround payload and specifies the following parameters: ascLen=110;
          AudioObjectType=30 (MPEG Surround); samplingFrequencyIndex=3 (48kHz);
          channelConfiguration=6 (5.1 channels); sacPayloadEmbedding=1;
          SpatialSpecificConfig=(48 kHz; 32 slots; 525 tree; ResCoding=1; ResBands=[7,7,7,7]).
        </t>
        <t>
          In this example the signalling is carried by using two different LATM layers.
          The MPEG Surround payload is carried together with the AAC payload in a
          single layer as indicated by the sacPayloadEmbedding flag.
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/48000
  a=fmtp:96 profile-level-id=1; bitrate=64000; cpresent=0;
  SBR-enabled=1;
  config=9FF8005192B11880FF2DDE3699F2408C00536C02313CF3CE0FF0
            </artwork>
          </figure>
        </t>
        <t>
          The following example is an extension of the configuration given above
          by the MPEG Surround specific parameters. The MPS-asc parameter specifies
          the MPEG Surround Baseline Profile at Level 3 (PLI55) and the MPS-asc
          string contains the hexadecimal representation of the MPEG Surround ASC
          [audioObjectType=30 (MPEG Surround); samplingFrequencyIndex=0x3 (48kHz);
          channelConfiguration=6 (5.1 channels); sacPayloadEmbedding=1;
          SpatialSpecificConfig=(48 kHz; 32 slots; 525 tree; ResCoding=1; ResBands=[0,13,13,13])].
          <figure>
            <artwork>
  m=audio 49230 RTP/AVP 96
  a=rtpmap:96 MP4A-LATM/48000
  a=fmtp:96 profile-level-id=44; bitrate=64000; cpresent=0;
  config=40005623101fe0; MPS-profile-level-id=55; 
  MPS-asc=F1B4CF920442029B501185B6DA00;
            </artwork>
          </figure>
        </t>
      </section>
    </section>
    <!-- This PI places the pagebreak correctly (before the section title) in the text output. -->
    <?rfc needLines="8" ?>
    <section title="IANA Considerations"
             toc="default">
      <t>
        This memo defines additional optional format parameters to the Media
        type "audio" and its subtype "MP4A-LATM".
      </t>
      <section title="Media Type registration" anchor="media-type-registration">
        <t>
          This memo defines the following additional optional parameters which SHOULD be used
          if SBR or MPEG Surround data is present inside the payload of an AAC elementary stream.
          <list hangIndent="2"
                style="empty">
            <t>
              MPS-profile-level-id: a decimal representation of the MPEG Surround Profile
              Level indication as defined in <xref target="14496-3">ISO/IEC 14496-3</xref>.
              This parameter indicates the MPEG Surround profile and level that the decoder
              must support in order to decode the stream.
            </t>
            <t>
              MPS-asc: a hexadecimal representation of an octet string that expresses
              audio payload configuration data "AudioSpecificConfig", as defined in
              <xref target="14496-3">ISO/IEC 14496-3</xref>. If this parameter is not
              present the relevant signalling is performed by other means (e.g.
              in-band or contained in the config string).
            </t>
            <t>
              SBR-enabled: a boolean parameter which indicates whether SBR-data can
              be expected in the RTP-payload of a stream. This parameter is relevant
              for an SBR-capable decoder if the presence of SBR can not be detected
              from an out-of-band decoder configuration (e.g. contained in the config string).
            </t>
          </list>
        </t>
      </section>

      <section title="Usage of SDP"
               toc="default">
        <t>
          It is assumed that the Media format parameters are conveyed via an SDP
          message as specified in <xref target="RFC3016" />, sections 5.2 and 5.4.
        </t>
      </section>
    </section>
    <!-- Possibly a 'Contributors' section ... -->
    <section anchor="Security"
             title="Security Considerations"
             toc="default">
      <t>
        RTP packets using the payload format defined in this specification are
        subject to the security considerations discussed in the RTP specification
        <xref target="RFC3550" />.  This implies that confidentiality of the media
        streams is achieved by encryption.  Because the data compression used with
        this payload format is applied end-to-end, encryption may be performed on
        the compressed data so there is no conflict between the two operations.
      </t>
      <t>
        The complete MPEG-4 system allows for transport of a wide range of
        content, including Java applets (MPEG-J) and scripts.  Since this payload
        format is restricted to audio and video streams, it is not possible to
        transport such active content in this format.
      </t>
    </section>
  </middle>

  <!-- ----------------------------------------------------------------- -->

  <!--  *****BACK MATTER ***** -->
  <back>
    <!-- References split into informative and normative -->
    <!-- There are 2 ways to insert reference entries from the citation libraries:
     1. define an ENTITY at the top, and use "ampersand character"RFC2629; here (as shown)
     2. simply use a PI "less than character"?rfc include="reference.RFC.2119.xml"?> here
        (for I-Ds: include="reference.I-D.narten-iana-considerations-rfc2434bis.xml")

     Both are cited textually in the same manner: by using xref elements.
     If you use the PI option, xml2rfc will, by default, try to find included files in the same
     directory as the including file. You can also define the XML_LIBRARY environment variable
     with a value containing a set of directories to search.  These can be either in the local
     filing system or remote ones accessed by http (http://domain/dir/... ).-->
    <references title="Normative References">
      &RFC2119;
      &RFC3016;
      &RFC3550;
      &RFC3640;
      <reference anchor="14496-2">
        <front>
          <title>ISO/IEC International Standard 14496-2 - Coding of audio-visual objects, Part 2: Visual</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="1999" />
        </front>
      </reference>
      <reference anchor="14496-2/Amd.1">
        <front>
          <title>ISO/IEC International Standard 14496-2 - Coding of audio-visual objects, Part 2: Visual, Amendment 1: Visual extensions</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="2000" />
        </front>
      </reference>
      <reference anchor="14496-3">
        <front>
          <title>ISO/IEC International Standard 14496-3 - Coding of audio-visual objects, Part 3 Audio</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="2005" />
        </front>
      </reference>
      <reference anchor="14496-1">
        <front>
          <title>ISO/IEC International Standard 14496-1 - Coding of audio-visual objects, Part 1 Systems</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="2004" />
        </front>
      </reference>
      <reference anchor="14496-2/Cor.1">
        <front>
          <title>ISO/IEC International Standard 14496-2 - Coding of audio-visual objects, Part 2: Visual, Technical corrigendum 1</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="2000" />
        </front>
      </reference>
      <reference anchor="14496-12">
        <front>
          <title>ISO/IEC International Standard 14496-12 - Coding of audio-visual objects, Part 12 ISO base media file format</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
        </front>
      </reference>
      <reference anchor="14496-14">
        <front>
          <title>ISO/IEC International Standard 14496-14 - Coding of audio-visual objects, Part 14 MP4 file format</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
        </front>
      </reference>
      <reference anchor="23003-1">
        <front>
          <title>ISO/IEC International Standard 23003-1 - MPEG Surround (MPEG D)</title>
          <author initials=""
                  surname="MPEG">
            <organization></organization>
          </author>
          <date year="2007" />
        </front>
      </reference>
    </references>
  </back>
</rfc>

<!-- PAFTECH AB 2003-20262026-04-24 05:41:07 -->