// Decode a video/RTSP input with FFmpeg, export per-frame motion vectors
// (+export_mvs) and raw YUV420P frame copies, and keep the most recent
// frames in a boost::circular_buffer of ring_buffer entries.
/* Global decoder state shared by open_codec_context(), decode_packet() and main(). */
static AVFormatContext *fmt_ctx = NULL;        /* demuxer context for the input URL */
static AVCodecContext *video_dec_ctx = NULL;   /* opened video decoder (aliases dec_ctx after open_codec_context) */
static AVStream *video_stream = NULL;          /* the selected video stream inside fmt_ctx */
static const char *src_filename = NULL;        /* input URL/path from argv[1] */
AVCodecContext *dec_ctx = NULL;                /* codec context allocated in open_codec_context() */
AVCodec *dec = NULL;                           /* decoder found by av_find_best_stream() */
static int video_stream_idx = -1;              /* index of the video stream, -1 until found */
static AVFrame *frame = NULL;                  /* reusable decode target, allocated in main() */
static int video_frame_count = 0;              /* frames decoded so far; decode stops after 500 */
class ring_buffer{
public:
std::vector <AVFrameSideData> mv_data;
uint8_t* buf_data;
ring_buffer(std::vector <AVFrameSideData> imv_data, uint8_t* ibuf_data): mv_data(imv_data), buf_data(ibuf_data) {
}
ring_buffer() {
mv_data={};
buf_data=nullptr;
}
ring_buffer( ring_buffer &&other){
std::cout << "copy move constructor " <<std::endl;
buf_data=other.buf_data;
other.buf_data=nullptr;
mv_data=other.mv_data;
}
ring_buffer& operator=( ring_buffer &&other){
std::cout << "move assignment operator " <<std::endl;
if (this!=&other) {
if (buf_data)
free(buf_data);
buf_data=other.buf_data;
other.buf_data=nullptr;
mv_data=other.mv_data;
}
return *this;
}
~ring_buffer(){
std::cout << "Destructor" << std::endl;
if (buf_data) {
std::cout << "Freeing data" << std::endl;
free(buf_data);
buf_data=nullptr;
}
}
};
// Create a circular buffer with a capacity for 10 ring_buffer.
// When full, push_back evicts the oldest entry (its destructor frees the frame data).
boost::circular_buffer<ring_buffer > cb(10);
std::vector<AVFrameSideData> mvects;  // scratch: side data of the most recent decoded frame
uint8_t* buff=nullptr;                // scratch: av_malloc'd pixels of the most recent frame
static int decode_packet(const AVPacket *pkt, std::vector<AVFrameSideData> *mvect, uint8_t **buffer)
{
if (video_frame_count > 500)
return -1;
std::cout << "FRAME " << video_frame_count << std::endl;
//Start decode here
int ret = avcodec_send_packet(video_dec_ctx, pkt);
if (ret < 0) {
std::cout << "Error sending packet " << std::endl;
return ret;
}
while (ret >= 0) {
ret = avcodec_receive_frame(video_dec_ctx, frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
} else if (ret < 0) {
std::cout << "Error receiving packet" << std::endl;
return ret;
}
if (ret >= 0) {
int i;
AVFrameSideData *sd;
video_frame_count++;
sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
if (sd) {
*mvect = std::vector<AVFrameSideData>(sd,sd+1);
} else
mvect={};
//SAVE the frame buffer to buffer in its default pixelformat
int bufsize=av_image_get_buffer_size(AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
*buffer = (uint8_t *) av_malloc(bufsize);
ret=av_image_copy_to_buffer(*buffer, bufsize, (const uint8_t **)frame->data, frame->linesize,
AV_PIX_FMT_YUV420P, frame->width, frame->height, 1);
if (ret<0)
return ret;
av_frame_unref(frame);
}
}
return 0;
}
/*
 * Find the best stream of the given media type, allocate and open a decoder
 * for it with motion-vector export enabled ("flags2 +export_mvs").
 *
 * On success sets the globals video_stream_idx, video_stream and
 * video_dec_ctx, and returns 0. On failure returns a negative AVERROR and
 * leaves no allocations behind (the original leaked dec_ctx on the error
 * paths and never freed the options dictionary).
 */
static int open_codec_context(AVFormatContext *fmt_ctx, enum AVMediaType type)
{
    AVDictionary *opts = NULL;

    int ret = av_find_best_stream(fmt_ctx, type, -1, -1, &dec, 0); // also fills `dec`
    if (ret < 0) {
        fprintf(stderr, "Could not find %s stream in input file '%s'\n",
                av_get_media_type_string(type), src_filename);
        return ret;
    }

    int stream_idx = ret;
    AVStream *st = fmt_ctx->streams[stream_idx];

    dec_ctx = avcodec_alloc_context3(dec);
    if (!dec_ctx) {
        fprintf(stderr, "Failed to allocate codec\n");
        return AVERROR(EINVAL);
    }

    ret = avcodec_parameters_to_context(dec_ctx, st->codecpar);
    if (ret < 0) {
        fprintf(stderr, "Failed to copy codec parameters to codec context\n");
        avcodec_free_context(&dec_ctx);    // leak fix: release on failure
        return ret;
    }

    // Force the decoder's output format; decode_packet copies as YUV420P.
    // NOTE(review): overriding pix_fmt before open is unusual — most
    // decoders negotiate this themselves; confirm it is actually needed.
    dec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;

    /* Init the video decoder with motion-vector export enabled. */
    av_dict_set(&opts, "flags2", "+export_mvs", 0);
    ret = avcodec_open2(dec_ctx, dec, &opts);
    av_dict_free(&opts);                   // leak fix: the dictionary is ours to free
    if (ret < 0) {
        fprintf(stderr, "Failed to open %s codec\n", av_get_media_type_string(type));
        avcodec_free_context(&dec_ctx);    // leak fix: release on failure
        return ret;
    }

    video_stream_idx = stream_idx;
    video_stream = fmt_ctx->streams[video_stream_idx];
    video_dec_ctx = dec_ctx;
    return 0;
}
int main(int argc, char **argv)
{
int ret = 0;
AVPacket pkt = { 0 };
if (argc != 2) {
fprintf(stderr, "Usage: %s rtsp://<user>:<pass>@url\n", argv[0]);
exit(1);
}
src_filename = argv[1];
avformat_network_init();
av_register_all();
if (avformat_open_input(&fmt_ctx, src_filename, NULL, NULL) < 0) {
fprintf(stderr, "Could not open source %s\n", src_filename);
exit(1);
}
if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
fprintf(stderr, "Could not find stream information\n");
exit(1);
}
open_codec_context(fmt_ctx, AVMEDIA_TYPE_VIDEO);
av_dump_format(fmt_ctx, 0, src_filename, 0);
if (!video_stream) {
fprintf(stderr, "Could not find video stream in the input, aborting\n");
ret = 1;
goto end;
}
frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Could not allocate frame\n");
ret = AVERROR(ENOMEM);
goto end;
}
/* read frames from the file */
while (av_read_frame(fmt_ctx, &pkt) >= 0) {
if (pkt.stream_index == video_stream_idx) {
ret = decode_packet(&pkt, &mvects, &buff);
if (ret >= 0)
cb.push_back(ring_buffer(mvects,buff));
}
av_packet_unref(&pkt);
if (ret < 0)
break;
}
/* flush cached frames */
//decode_packet(NULL,NULL);
end:
avcodec_free_context(&video_dec_ctx);
avformat_close_input(&fmt_ctx);
av_frame_free(&frame);
return ret < 0;
}