+/*\r
+ * GPAC - Multimedia Framework C SDK\r
+ *\r
+ * Copyright (c) Jean Le Feuvre 2000-2005 \r
+ * All rights reserved\r
+ *\r
+ * This file is part of GPAC / exported constants\r
+ *\r
+ * GPAC is free software; you can redistribute it and/or modify\r
+ * it under the terms of the GNU Lesser General Public License as published by\r
+ * the Free Software Foundation; either version 2, or (at your option)\r
+ * any later version.\r
+ * \r
+ * GPAC is distributed in the hope that it will be useful,\r
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
+ * GNU Lesser General Public License for more details.\r
+ * \r
+ * You should have received a copy of the GNU Lesser General Public\r
+ * License along with this library; see the file COPYING. If not, write to\r
+ * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. \r
+ *\r
+ */\r
+\r
+#ifndef _GF_CONSTANTS_H_\r
+#define _GF_CONSTANTS_H_\r
+\r
+#ifdef __cplusplus\r
+extern "C" {\r
+#endif\r
+\r
+#include <gpac/tools.h>\r
+\r
+/*! \addtogroup cst_grp constants\r
+ * \brief Constants used within GPAC\r
+ *\r
+ * This section documents some constants used in the GPAC framework which are not related to\r
+ * any specific sub-project.\r
+ * \ingroup utils_grp\r
+ * @{\r
+ */\r
+\r
+\r
/*!
 * \brief Supported media stream types
 * \hideinitializer
 *
 * Supported media stream types for media objects.
*/
enum
{
	/*!MPEG-4 Object Descriptor Stream*/
	GF_STREAM_OD		= 0x01,
	/*!MPEG-4 Object Clock Reference Stream*/
	GF_STREAM_OCR		= 0x02,
	/*!MPEG-4 Scene Description Stream*/
	GF_STREAM_SCENE		= 0x03,
	/*!Visual Stream (Video, Image or MPEG-4 SNHC Tools)*/
	GF_STREAM_VISUAL	= 0x04,
	/*!Audio Stream (Audio, MPEG-4 Structured-Audio Tools)*/
	GF_STREAM_AUDIO		= 0x05,
	/*!MPEG-7 Description Stream*/
	GF_STREAM_MPEG7		= 0x06,
	/*!MPEG-4 Intellectual Property Management and Protection Stream*/
	GF_STREAM_IPMP		= 0x07,
	/*!MPEG-4 Object Content Information Stream*/
	GF_STREAM_OCI		= 0x08,
	/*!MPEG-4 MPEGlet (MPEG-J) Stream*/
	GF_STREAM_MPEGJ		= 0x09,
	/*!MPEG-4 User Interaction Stream*/
	GF_STREAM_INTERACT	= 0x0A,
	/*!MPEG-4 IPMP Tool Stream*/
	GF_STREAM_IPMP_TOOL	= 0x0B,
	/*!MPEG-4 Font Data Stream*/
	GF_STREAM_FONT		= 0x0C,
	/*!MPEG-4 Streaming Text Stream*/
	GF_STREAM_TEXT		= 0x0D,
	/*!Nero Digital Subpicture Stream*/
	GF_STREAM_ND_SUBPIC = 0x38,

	/*GPAC internal stream types*/


	/*!GPAC Private Scene streams\n
	 *\n\note
	 *This stream type (MPEG-4 user-private) is reserved for streams only used to create a scene decoder
	 *handling the scene without input streams, as is the case for file readers (BT/VRML/XML..).\n
	 *The decoderSpecificInfo carried is as follows:
	 \code
		u32 file_size:	total file size
		char file_name[dsi_size - sizeof(u32)]: local file name.
		\n\note: The file may be a cache file; it is the decoder's responsibility to check that the file is completely
		downloaded before parsing, if needed.
	 \endcode
	 *The inBufferLength param for decoders using these streams is the stream clock in ms (no input data is given).\n
	 *The "dummy_in" module is available to generate these streams for common files, and also takes care of proper
	 clock init in case of seeking.\n
	 *This is a reentrant stream type: if any media objects with this streamtype also exist in the scene, they will be
	 *attached to the scene decoder (except when a new inline scene is detected, in which case a new decoder will
	 *be created). This allows for animation/sprite usage along with the systems timing/stream management.\n
	 *\n
	 *The objectTypeIndication values currently in use for these streams are documented below.\n
	 */
	GF_STREAM_PRIVATE_SCENE	= 0x20,
};
+\r
+\r
/*!
 * Media Object types
 *
 * This type provides a hint to network modules which may have to generate a service descriptor on the fly.
 * These hints occur only if objects/services used in the scene are not referenced through ObjectDescriptors (MPEG-4)
 * but directly through URL.
*/
enum
{
	/*!service descriptor expected is of undefined type. This should be treated like GF_MEDIA_OBJECT_SCENE*/
	GF_MEDIA_OBJECT_UNDEF = 0,
	/*!service descriptor expected is of SCENE type and shall contain a scene stream, and an OD one if needed*/
	GF_MEDIA_OBJECT_SCENE,
	/*!service descriptor expected is of SCENE UPDATES type (animation streams)*/
	GF_MEDIA_OBJECT_UPDATES,
	/*!service descriptor expected is of VISUAL type*/
	GF_MEDIA_OBJECT_VIDEO,
	/*!service descriptor expected is of AUDIO type*/
	GF_MEDIA_OBJECT_AUDIO,
	/*!service descriptor expected is of TEXT type (3GPP/MPEG4)*/
	GF_MEDIA_OBJECT_TEXT,
	/*!service descriptor expected is of UserInteraction type (MPEG-4 InputSensor)*/
	GF_MEDIA_OBJECT_INTERACT,
};
+\r
/*! All Media Objects inserted through URLs, rather than through the MPEG-4 OD Framework, use this ODID*/
+#define GF_MEDIA_EXTERNAL_ID 1050\r
+\r
+\r
/*!
 * \brief Pixel Formats
 *
 * Supported pixel formats for everything using video.
 *\note For textures using 32 bit ARGB/RGB_32/BGR_32:
 *\li on little endian machines, shall be ordered in memory as BGRA,
 *\li on big endians, shall be ordered in memory as ARGB,
 *so that *(u32*)pixel_mem is always ARGB (0xAARRGGBB).
*/
typedef enum
{
	/*!8 bit GREY*/
	GF_PIXEL_GREYSCALE	=	GF_4CC('G','R','E','Y'),
	/*!16 bit greyscale (grey + alpha)*/
	GF_PIXEL_ALPHAGREY	=	GF_4CC('G','R','A','L'),
	/*!12 bit RGB on 16 bits (4096 colors)*/
	GF_PIXEL_RGB_444	=	GF_4CC('R','4','4','4'),
	/*!15 bit RGB*/
	GF_PIXEL_RGB_555	=	GF_4CC('R','5','5','5'),
	/*!16 bit RGB*/
	GF_PIXEL_RGB_565	=	GF_4CC('R','5','6','5'),
	/*!24 bit RGB*/
	GF_PIXEL_RGB_24		=	GF_4CC('R','G','B','3'),
	/*!24 bit BGR - used for graphics cards video format signaling*/
	GF_PIXEL_BGR_24		=	GF_4CC('B','G','R','3'),
	/*!32 bit RGB*/
	GF_PIXEL_RGB_32		=	GF_4CC('R','G','B','4'),
	/*!32 bit BGR - used for graphics cards video format signaling*/
	GF_PIXEL_BGR_32		=	GF_4CC('B','G','R','4'),

	/*!32 bit ARGB*/
	GF_PIXEL_ARGB		=	GF_4CC('A','R','G','B'),
	/*!32 bit RGBA (openGL like)*/
	GF_PIXEL_RGBA		=	GF_4CC('R','G','B', 'A'),

	/*!RGB24 + depth plane*/
	GF_PIXEL_RGBD		=	GF_4CC('R', 'G', 'B', 'D'),

	/*!RGB24 + depth plane (7 power bits) + shape mask*/
	GF_PIXEL_RGBDS		=	GF_4CC('3', 'C', 'D', 'S'),

	/*!YUV packed format*/
	GF_PIXEL_YUY2		=	GF_4CC('Y','U','Y','2'),
	/*!YUV packed format*/
	GF_PIXEL_YVYU		=	GF_4CC('Y','V','Y','U'),
	/*!YUV packed format*/
	GF_PIXEL_UYVY		=	GF_4CC('U','Y','V','Y'),
	/*!YUV packed format*/
	GF_PIXEL_VYUY		=	GF_4CC('V','Y','U','Y'),
	/*!YUV packed format*/
	GF_PIXEL_Y422		=	GF_4CC('Y','4','2','2'),
	/*!YUV packed format*/
	GF_PIXEL_UYNV		=	GF_4CC('U','Y','N','V'),
	/*!YUV packed format*/
	GF_PIXEL_YUNV		=	GF_4CC('Y','U','N','V'),
	/*!YUV packed format*/
	GF_PIXEL_V422		=	GF_4CC('V','4','2','2'),

	/*!YUV planar format*/
	GF_PIXEL_YV12		=	GF_4CC('Y','V','1','2'),
	/*!YUV planar format*/
	GF_PIXEL_IYUV		=	GF_4CC('I','Y','U','V'),
	/*!YUV planar format*/
	GF_PIXEL_I420		=	GF_4CC('I','4','2','0'),

	/*!YV12 + Alpha plane*/
	GF_PIXEL_YUVA		=	GF_4CC('Y', 'U', 'V', 'A'),

	/*!YV12 + Depth plane*/
	GF_PIXEL_YUVD		=	GF_4CC('Y', 'U', 'V', 'D'),

} GF_PixelFormat;
+\r
+\r
/*!
 * \brief Scene ObjectTypeIndication Formats
 *
 * Supported ObjectTypeIndication for scene description streams. The *_PRIVATE_SCENE_* values are only used with private scene streams
 * and only carry the file name for the scene. Other internal stream types can be used in a real streaming environment.
*/
enum
{
	/*!OTI for BIFS v1*/
	GPAC_OTI_SCENE_BIFS = 0x01,
	/*!OTI for BIFS v2*/
	GPAC_OTI_SCENE_BIFS_V2 = 0x02,
	/*!OTI for BIFS InputSensor streams*/
	GPAC_OTI_SCENE_INTERACT = 0x03,
	/*!OTI for LASeR streams*/
	GPAC_OTI_SCENE_LASER = 0x09,

	/*!OTI for dummy streams (dsi = file name) using the generic context loader (BIFS/VRML/SWF/...) - GPAC internal*/
	GPAC_OTI_PRIVATE_SCENE_GENERIC = 0xC0,
	/*!OTI for SVG dummy stream (dsi = file name) - GPAC internal*/
	GPAC_OTI_PRIVATE_SCENE_SVG = 0xC1,
	/*!OTI for LASeR/SAF+XML dummy stream (dsi = file name) - GPAC internal*/
	GPAC_OTI_PRIVATE_SCENE_LASER = 0xC2,
	/*!OTI for XBL dummy streams (dsi = file name) - GPAC internal*/
	GPAC_OTI_PRIVATE_SCENE_XBL = 0xC3,
	/*!OTI for EPG dummy streams (dsi = null) - GPAC internal*/
	GPAC_OTI_PRIVATE_SCENE_EPG = 0xC4,

	/*!OTI for streaming SVG - GPAC internal*/
	GPAC_OTI_SCENE_SVG = 0xD0,
	/*!OTI for streaming SVG + gz - GPAC internal*/
	GPAC_OTI_SCENE_SVG_GZ = 0xD1,
	/*!OTI for DIMS (dsi = 3GPP DIMS configuration) - GPAC internal*/
	GPAC_OTI_SCENE_DIMS = 0xD2,
};
+\r
+\r
/*!
 * \brief Extra ObjectTypeIndication
 *
 * ObjectTypeIndication for media (audio/video) codecs not defined in MPEG-4. Since GPAC signals streams through MPEG-4 Descriptions,
 * it needs extensions for non-MPEG-4 streams such as AMR, H263, etc.\n
 *\note The decoder specific info for such streams is always carried encoded, with the following syntax:\n
 * DSI Syntax for audio streams
 \code
 *	u32 codec_four_cc: the codec 4CC reg code / codec id for ffmpeg
 *	u32 sample_rate: sampling rate or 0 if unknown
 *	u16 nb_channels: num channels or 0 if unknown
 *	u16 frame_size: num audio samples per frame or 0 if unknown
 *	u8 nb_bits_per_sample: nb bits or 0 if unknown
 *	u8 num_frames_per_au: num audio frames per AU (used in 3GPP, max 15), 0 if unknown
 *	char *data: per-codec extensions till end of DSI bitstream
 \endcode
 \n
 * DSI Syntax for video streams
 \code
 *	u32 codec_four_cc: the codec 4CC reg code / codec id for ffmpeg
 *	u16 width: video width or 0 if unknown
 *	u16 height: video height or 0 if unknown
 *	char *data: per-codec extensions till end of DSI bitstream
 \endcode
*/
#define GPAC_OTI_MEDIA_GENERIC 0x80
+\r
/*!
 * \brief FFMPEG ObjectTypeIndication
 *
 * ObjectTypeIndication for FFMPEG codecs not defined in MPEG-4. FFMPEG uses the base GPAC_OTI_MEDIA_GENERIC specific info format, and extends it as follows:
 \code
 *	u32 bit_rate: the stream rate or 0 if unknown
 *	u32 codec_tag: FFMPEG codec tag as defined in libavcodec
 *	char *data: codec extensions till end of DSI bitstream
 \endcode
 */
#define GPAC_OTI_MEDIA_FFMPEG 0x81
+\r
+\r
/*!
 * \brief OGG ObjectTypeIndication
 *
 * ObjectTypeIndication for all OGG media. The DSI contains all initialization OGG packets for the codec
 * and is formatted as follows:\n
 *\code
	while (dsi_size) {
		bit(16) packet_size;
		char packet[packet_size];
		dsi_size -= packet_size;
	}\endcode
*/
#define GPAC_OTI_MEDIA_OGG 0xDD
+\r
+\r
/*channel cfg flags - DECODERS MUST OUTPUT STEREO/MULTICHANNEL IN THIS ORDER*/
/*!
 * \brief Audio Channel Configuration
 *
 * Audio channel flags for spatialization. These are bit flags and may be OR'ed together to describe a channel layout.
 \note Decoders must output stereo/multichannel audio channels in this order in the decoded audio frame.
 */
enum
{
	/*!Left Audio Channel*/
	GF_AUDIO_CH_FRONT_LEFT = (1),
	/*!Right Audio Channel*/
	GF_AUDIO_CH_FRONT_RIGHT = (1<<1),
	/*!Center Audio Channel - may also be used to signal monophonic audio*/
	GF_AUDIO_CH_FRONT_CENTER = (1<<2),
	/*!LFE (Low Frequency Effects) Audio Channel*/
	GF_AUDIO_CH_LFE = (1<<3),
	/*!Back Left Audio Channel*/
	GF_AUDIO_CH_BACK_LEFT = (1<<4),
	/*!Back Right Audio Channel*/
	GF_AUDIO_CH_BACK_RIGHT = (1<<5),
	/*!Back Center Audio Channel*/
	GF_AUDIO_CH_BACK_CENTER = (1<<6),
	/*!Side Left Audio Channel*/
	GF_AUDIO_CH_SIDE_LEFT = (1<<7),
	/*!Side Right Audio Channel*/
	GF_AUDIO_CH_SIDE_RIGHT = (1<<8)
};
+\r
+\r
+\r
/*DIMS unit flags */
/*!
 * \brief DIMS Unit header flags
 *
 * DIMS Unit header flags, as specified in 3GPP TS 26.142.
 */
enum
{
	/*!S: is-Scene: DIMS unit contains a complete document (a whole <svg> scene)*/
	GF_DIMS_UNIT_S = 1,
	/*!M: is-RAP: DIMS unit is a random access point*/
	GF_DIMS_UNIT_M = 1<<1,
	/*!I: is-Redundant: DIMS unit is made of redundant data*/
	GF_DIMS_UNIT_I = 1<<2,
	/*!D: redundant-exit: DIMS unit is the end of redundant data*/
	GF_DIMS_UNIT_D = 1<<3,
	/*!P: priority: DIMS unit is high priority*/
	GF_DIMS_UNIT_P = 1<<4,
	/*!C: compressed: DIMS unit is compressed*/
	GF_DIMS_UNIT_C = 1<<5
};
+\r
+\r
+/*!\r
+ \cond DUMMY_DOXY_SECTION\r
+*/\r
+\r
/*AVC NAL unit types - values follow the nal_unit_type field of ITU-T H.264 / ISO/IEC 14496-10*/
#define GF_AVC_NALU_NON_IDR_SLICE 0x1
#define GF_AVC_NALU_DP_A_SLICE 0x2
#define GF_AVC_NALU_DP_B_SLICE 0x3
#define GF_AVC_NALU_DP_C_SLICE 0x4
#define GF_AVC_NALU_IDR_SLICE 0x5
#define GF_AVC_NALU_SEI 0x6
#define GF_AVC_NALU_SEQ_PARAM 0x7
#define GF_AVC_NALU_PIC_PARAM 0x8
#define GF_AVC_NALU_ACCESS_UNIT 0x9
#define GF_AVC_NALU_END_OF_SEQ 0xa
#define GF_AVC_NALU_END_OF_STREAM 0xb
#define GF_AVC_NALU_FILLER_DATA 0xc

/*AVC slice types - values follow the slice_type field of ITU-T H.264 / ISO/IEC 14496-10.
  The TYPE2_* variants (slice_type 5-9) signal that all slices of the current picture have the same type*/
#define GF_AVC_TYPE_P 0
#define GF_AVC_TYPE_B 1
#define GF_AVC_TYPE_I 2
#define GF_AVC_TYPE_SP 3
#define GF_AVC_TYPE_SI 4
#define GF_AVC_TYPE2_P 5
#define GF_AVC_TYPE2_B 6
#define GF_AVC_TYPE2_I 7
#define GF_AVC_TYPE2_SP 8
#define GF_AVC_TYPE2_SI 9
+\r
+\r
/*rate sizes - note that these sizes INCLUDE the rate_type header byte*/
/*QCELP/SMV/EVRC tables are flat lists of (rate_type, frame_size) pairs; the *_NB constant is the number of pairs.
  AMR tables map the 4-bit frame type directly to the frame payload size in bytes.
  NOTE(review): these are `static const` in a header, so each including translation unit gets its own copy -
  acceptable for small tables, but `extern` declarations would avoid the duplication. Verify before changing linkage.*/
static const u32 GF_QCELP_RATE_TO_SIZE [] = {0, 1, 1, 4, 2, 8, 3, 17, 4, 35, 5, 8, 14, 1};
static const u32 GF_QCELP_RATE_TO_SIZE_NB = 7;
static const u32 GF_SMV_EVRC_RATE_TO_SIZE [] = {0, 1, 1, 3, 2, 6, 3, 11, 4, 23, 5, 1};
static const u32 GF_SMV_EVRC_RATE_TO_SIZE_NB = 6;
static const u32 GF_AMR_FRAME_SIZE[16] = { 12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0 };
static const u32 GF_AMR_WB_FRAME_SIZE[16] = { 17, 23, 32, 36, 40, 46, 50, 58, 60, 5, 5, 0, 0, 0, 0, 0 };
+\r
+\r
+/*!\r
+ \endcond\r
+*/\r
+\r
+\r
+/*! @} */\r
+\r
+#ifdef __cplusplus\r
+}\r
+#endif\r
+\r
+#endif /*_GF_CONSTANTS_H_*/\r