if (size < mtu) { // test - don't fragment
// Single NAL Unit
f = create_video_frame(start, d - 4, AST_FORMAT_H264, 0, cur);
if (!f)
break;
if (!first)
first = f;
cur = f;
continue;
}
// Fragmented Unit (Mode A: no DON, very weak)
hdr[0] = (*start & 0xe0) | 28; /* mark as a fragmentation unit */
hdr[1] = (*start++ & 0x1f) | 0x80 ; /* keep type and set START bit */
size--; /* skip the NAL header */
while (size) {
uint8_t *data;
int frag_size = MIN(size, mtu);
f = create_video_frame(start, start+frag_size, AST_FORMAT_H264, 2, cur);
if (!f)
break;
size -= frag_size; /* skip this data block */
start += frag_size;
data = f->data.ptr;
data[0] = hdr[0];
data[1] = hdr[1] | (size == 0 ? 0x40 : 0); /* end bit if we are done */
hdr[1] &= ~0x80; /* clear start bit for subsequent frames */
if (!first)
first = f;
cur = f;
}
}
if (cur)
cur->subclass |= 1; // RTP Marker
*tail = cur;
return first;
}
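/*
 * Illustrative sketch, not part of the original file: the two add-on bytes
 * built in the fragmentation path above follow the RFC 3984 FU-A layout.
 * fua_header_sketch() is a hypothetical helper that just restates the bit
 * manipulation done inline above (assumes <stdint.h>).
 */
static void fua_header_sketch(uint8_t nal_hdr, int is_first, int is_last, uint8_t out[2])
{
	out[0] = (nal_hdr & 0xe0) | 28;	/* FU indicator: F+NRI copied, type 28 = FU-A */
	out[1] = (nal_hdr & 0x1f)	/* FU header: original NAL unit type */
		| (is_first ? 0x80 : 0)	/* S bit on the first fragment */
		| (is_last ? 0x40 : 0);	/* E bit on the last fragment */
}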
static int h264_decap(struct fbuf_t *b, uint8_t *data, int len)
{
/* Start Code Prefix (Annex B in specification) */
uint8_t scp[] = { 0x00, 0x00, 0x00, 0x01 };
int retval = 0;
int type, ofs = 0;
if (len < 2) {
ast_log(LOG_WARNING, "--- invalid len %d\n", len);
return 1;
}
/* first of all, check if the packet has F == 0 */
if (data[0] & 0x80) {
ast_log(LOG_WARNING, "--- forbidden packet; nal: %02x\n",
data[0]);
return 1;
}
type = data[0] & 0x1f;
switch (type) {
case 0:
case 31:
ast_log(LOG_WARNING, "--- invalid type: %d\n", type);
return 1;
case 24:
case 25:
case 26:
case 27:
case 29:
ast_log(LOG_WARNING, "--- encapsulation not supported : %d\n", type);
return 1;
case 28: /* FU-A Unit */
if (data[1] & 0x80) { // S == 1, import F and NRI from next
data[1] &= 0x1f; /* preserve type */
data[1] |= (data[0] & 0xe0); /* import F & NRI */
retval = fbuf_append(b, scp, sizeof(scp), 0, 0);
ofs = 1;
} else {
ofs = 2;
}
break;
default: /* From 1 to 23 (Single NAL Unit) */
retval = fbuf_append(b, scp, sizeof(scp), 0, 0);
}
if (!retval)
retval = fbuf_append(b, data + ofs, len - ofs, 0, 0);
if (retval)
ast_log(LOG_WARNING, "result %d\n", retval);
return retval;
}
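/*
 * Minimal sketch, not in the original source: what the fbuf_append() calls
 * above produce for one FU-A packet, written against a plain byte buffer so
 * it is self-contained.  fua_to_annexb_sketch() is a hypothetical helper
 * (assumes <string.h> and <stdint.h>); the real code streams into struct fbuf_t.
 */
static int fua_to_annexb_sketch(uint8_t *out, int outlen, const uint8_t *pkt, int len)
{
	static const uint8_t scp[4] = { 0x00, 0x00, 0x00, 0x01 };
	int n = 0;
	if (len < 2)
		return -1;
	if (pkt[1] & 0x80) {			/* S bit set: first fragment */
		if (outlen < (int)sizeof(scp) + 1 + (len - 2))
			return -1;
		memcpy(out, scp, sizeof(scp));	/* Annex B start code */
		n = sizeof(scp);
		/* rebuild the NAL header: F+NRI from the FU indicator, type from the FU header */
		out[n++] = (pkt[0] & 0xe0) | (pkt[1] & 0x1f);
	} else if (outlen < len - 2) {
		return -1;
	}
	memcpy(out + n, pkt + 2, len - 2);	/* append the fragment payload */
	return n + (len - 2);
}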
static struct video_codec_desc h264_codec = {
.name = "h264",
.format = AST_FORMAT_H264,
.enc_init = h264_enc_init,
.enc_encap = h264_encap,
.enc_run = ffmpeg_encode,
.dec_init = h264_dec_init,
.dec_decap = h264_decap,
.dec_run = ffmpeg_decode
};
/*
* Table of translation between asterisk and ffmpeg formats.
* We also need a field telling whether an entry applies to reading, writing
* (decoding, encoding) or both, because e.g. H263+ uses different ffmpeg
* codec IDs when encoding and decoding.
*/
struct _cm { /* map ffmpeg codec types to asterisk formats */
uint32_t ast_format; /* 0 is a terminator */
enum CodecID codec;
enum { CM_RD = 1, CM_WR = 2, CM_RDWR = 3 } rw; /* read or write or both ? */
//struct video_codec_desc *codec_desc;
};
static const struct _cm video_formats[] = {
{ AST_FORMAT_H263_PLUS, CODEC_ID_H263, CM_RD }, /* incoming H263P ? */
{ AST_FORMAT_H263_PLUS, CODEC_ID_H263P, CM_WR },
{ AST_FORMAT_H263, CODEC_ID_H263, CM_RD },
{ AST_FORMAT_H263, CODEC_ID_H263, CM_WR },
{ AST_FORMAT_H261, CODEC_ID_H261, CM_RDWR },
{ AST_FORMAT_H264, CODEC_ID_H264, CM_RDWR },
{ AST_FORMAT_MP4_VIDEO, CODEC_ID_MPEG4, CM_RDWR },
{ 0, 0, 0 },
};
/*! \brief map an asterisk format into an ffmpeg one */
static enum CodecID map_video_format(uint32_t ast_format, int rw)
{
const struct _cm *i;
for (i = video_formats; i->ast_format != 0; i++)
if (ast_format & i->ast_format && rw & i->rw)
return i->codec;
return CODEC_ID_NONE;
}
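/*
 * Illustrative only, not in the original file: e.g. picking the ffmpeg
 * decoder for incoming H.264 boils down to
 *
 *	enum CodecID id = map_video_format(AST_FORMAT_H264, CM_RD);
 *	// id == CODEC_ID_H264 here, CODEC_ID_NONE for an unmapped format
 */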
/* pointers to supported codecs. We assume the first one to be non null. */
static const struct video_codec_desc *supported_codecs[] = {
&h263p_codec,
&h264_codec,
&h263_codec,
&h261_codec,
&mpeg4_codec,
NULL
};
/*
* Map the AST_FORMAT to the library. If not recognised, fail.
* This is useful in the input path where we get frames.
*/
static struct video_codec_desc *map_video_codec(int fmt)
{
int i;
for (i = 0; supported_codecs[i]; i++)
if (fmt == supported_codecs[i]->format) {
ast_log(LOG_WARNING, "using %s for format 0x%x\n",
supported_codecs[i]->name, fmt);
return supported_codecs[i];
}
return NULL;
}
/*! \brief uninitialize the descriptor for remote video stream */
static struct video_dec_desc *dec_uninit(struct video_dec_desc *v)
{
int i;
if (v == NULL) /* not initialized yet */
return NULL;
if (v->parser) {
av_parser_close(v->parser);
v->parser = NULL;
}
if (v->dec_ctx) {
avcodec_close(v->dec_ctx);
av_free(v->dec_ctx);
v->dec_ctx = NULL;
}
if (v->d_frame) {
av_free(v->d_frame);
v->d_frame = NULL;
}
v->codec = NULL; /* only a reference */
v->d_callbacks = NULL; /* forget the decoder */
v->discard = 1; /* start in discard mode */
for (i = 0; i < N_DEC_IN; i++)
fbuf_free(&v->dec_in[i]);
fbuf_free(&v->dec_out);
ast_free(v);
return NULL; /* error, in case someone cares */
}
/*
* initialize ffmpeg resources used for decoding frames from the network.
*/
static struct video_dec_desc *dec_init(uint32_t the_ast_format)
{
enum CodecID codec;
struct video_dec_desc *v = ast_calloc(1, sizeof(*v));
if (v == NULL)
return NULL;
v->discard = 1;
v->d_callbacks = map_video_codec(the_ast_format);
if (v->d_callbacks == NULL) {
ast_log(LOG_WARNING, "cannot find video codec, drop input 0x%x\n", the_ast_format);
return dec_uninit(v);
}
codec = map_video_format(v->d_callbacks->format, CM_RD);
v->codec = avcodec_find_decoder(codec);
if (!v->codec) {
ast_log(LOG_WARNING, "Unable to find the decoder for format %d\n", codec);
return dec_uninit(v);
}
/*
* Initialize the codec context.
*/
v->dec_ctx = avcodec_alloc_context();
if (!v->dec_ctx) {
ast_log(LOG_WARNING, "Cannot allocate the decoder context\n");
return dec_uninit(v);
}
/* XXX call dec_init() ? */
if (avcodec_open(v->dec_ctx, v->codec) < 0) {
ast_log(LOG_WARNING, "Cannot open the decoder context\n");
av_free(v->dec_ctx);
v->dec_ctx = NULL;
return dec_uninit(v);
}
v->parser = av_parser_init(codec);
if (!v->parser) {
ast_log(LOG_WARNING, "Cannot initialize the decoder parser\n");
return dec_uninit(v);
}
v->d_frame = avcodec_alloc_frame();
if (!v->d_frame) {
ast_log(LOG_WARNING, "Cannot allocate decoding video frame\n");
return dec_uninit(v);
}
v->dec_in_cur = &v->dec_in[0]; /* buffer for incoming frames */
v->dec_in_dpy = NULL; /* nothing to display */
return v; /* ok */
}
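/*
 * Hypothetical usage sketch, not part of the original source: a decoder
 * descriptor is created per remote stream and torn down with dec_uninit(),
 * which always returns NULL so the caller can reassign in one step.
 *
 *	struct video_dec_desc *v = dec_init(AST_FORMAT_H264);
 *	if (v) {
 *		// feed RTP payloads through v->d_callbacks->dec_decap(),
 *		// then decode with v->d_callbacks->dec_run()
 *		v = dec_uninit(v);	// frees everything, v is NULL again
 *	}
 */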
/*------ end codec specific code -----*/