Line data Source code
1 : /*
2 : * GPAC - Multimedia Framework C SDK
3 : *
4 : * Authors: Jean Le Feuvre
5 : * Copyright (c) Telecom ParisTech 2000-2018
6 : * All rights reserved
7 : *
8 : * This file is part of GPAC / Scene Compositor sub-project
9 : *
10 : * GPAC is free software; you can redistribute it and/or modify
11 : * it under the terms of the GNU Lesser General Public License as published by
12 : * the Free Software Foundation; either version 2, or (at your option)
13 : * any later version.
14 : *
15 : * GPAC is distributed in the hope that it will be useful,
16 : * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 : * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 : * GNU Lesser General Public License for more details.
19 : *
20 : * You should have received a copy of the GNU Lesser General Public
21 : * License along with this library; see the file COPYING. If not, write to
22 : * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
23 : *
24 : */
25 :
26 : #include <gpac/internal/compositor_dev.h>
27 : #include <gpac/internal/scenegraph_dev.h>
28 : #include <gpac/nodes_x3d.h>
29 : #include <gpac/nodes_svg.h>
30 : #include <gpac/network.h>
31 :
32 :
33 : #ifndef GPAC_DISABLE_SVG
34 2 : static GF_MediaObject *get_sync_reference(GF_Scene *scene, XMLRI *iri, u32 o_type, GF_Node *orig_ref, Bool *post_pone)
35 : {
36 : MFURL mfurl;
37 : SFURL sfurl;
38 : GF_MediaObject *res;
39 : GF_Node *ref = NULL;
40 :
41 : u32 stream_id = 0;
42 2 : if (post_pone) *post_pone = GF_FALSE;
43 :
44 2 : if (iri->type==XMLRI_STREAMID) {
45 0 : stream_id = iri->lsr_stream_id;
46 2 : } else if (!iri->string) {
47 : return NULL;
48 : } else {
49 2 : if (iri->target) ref = (GF_Node *)iri->target;
50 2 : else if (iri->string[0]=='#') ref = gf_sg_find_node_by_name(scene->graph, iri->string+1);
51 2 : else ref = gf_sg_find_node_by_name(scene->graph, iri->string);
52 :
53 2 : if (ref) {
54 : GF_FieldInfo info;
55 : /*safety check, break cyclic references*/
56 2 : if (ref==orig_ref) return NULL;
57 :
58 0 : switch (ref->sgprivate->tag) {
59 0 : case TAG_SVG_audio:
60 : o_type = GF_MEDIA_OBJECT_AUDIO;
61 0 : if (gf_node_get_attribute_by_tag(ref, TAG_XLINK_ATT_href, GF_FALSE, GF_FALSE, &info)==GF_OK) {
62 0 : return get_sync_reference(scene, (XMLRI *)info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
63 : }
64 : return NULL;
65 0 : case TAG_SVG_video:
66 : o_type = GF_MEDIA_OBJECT_VIDEO;
67 0 : if (gf_node_get_attribute_by_tag(ref, TAG_XLINK_ATT_href, GF_FALSE, GF_FALSE, &info)==GF_OK) {
68 0 : return get_sync_reference(scene, (XMLRI *)info.far_ptr, o_type, orig_ref ? orig_ref : ref, post_pone);
69 : }
70 : return NULL;
71 : default:
72 : return NULL;
73 : }
74 : }
75 : }
76 0 : *post_pone = GF_FALSE;
77 0 : mfurl.count = 1;
78 0 : mfurl.vals = &sfurl;
79 0 : mfurl.vals[0].OD_ID = stream_id;
80 0 : mfurl.vals[0].url = iri->string;
81 :
82 0 : res = gf_scene_get_media_object(scene, &mfurl, o_type, GF_FALSE);
83 0 : if (!res) *post_pone = GF_TRUE;
84 : return res;
85 : }
86 : #endif
87 :
88 :
89 : GF_EXPORT
90 1293 : GF_MediaObject *gf_mo_register(GF_Node *node, MFURL *url, Bool lock_timelines, Bool force_new_res)
91 : {
92 : u32 obj_type;
93 : #ifndef GPAC_DISABLE_SVG
94 : Bool post_pone;
95 : GF_FieldInfo info;
96 : #endif
97 : GF_Scene *scene;
98 : GF_MediaObject *res, *syncRef;
99 1293 : GF_SceneGraph *sg = gf_node_get_graph(node);
100 1293 : if (!sg) return NULL;
101 1293 : scene = (GF_Scene*)gf_sg_get_private(sg);
102 1293 : if (!scene) return NULL;
103 :
104 : syncRef = NULL;
105 :
106 : /*keep track of the kind of object expected if URL is not using OD scheme*/
107 1293 : switch (gf_node_get_tag(node)) {
108 : #ifndef GPAC_DISABLE_VRML
109 : /*MPEG-4 / VRML / X3D only*/
110 : case TAG_MPEG4_AudioClip:
111 : case TAG_MPEG4_AudioSource:
112 : #ifndef GPAC_DISABLE_X3D
113 : case TAG_X3D_AudioClip:
114 : #endif
115 : obj_type = GF_MEDIA_OBJECT_AUDIO;
116 : break;
117 187 : case TAG_MPEG4_SBVCAnimation:
118 : case TAG_MPEG4_AnimationStream:
119 : obj_type = GF_MEDIA_OBJECT_UPDATES;
120 187 : break;
121 6 : case TAG_MPEG4_BitWrapper:
122 : obj_type = GF_MEDIA_OBJECT_SCENE;
123 6 : break;
124 18 : case TAG_MPEG4_InputSensor:
125 : obj_type = GF_MEDIA_OBJECT_INTERACT;
126 18 : break;
127 403 : case TAG_MPEG4_Background2D:
128 : case TAG_MPEG4_Background:
129 : case TAG_MPEG4_ImageTexture:
130 : case TAG_MPEG4_CacheTexture:
131 : case TAG_MPEG4_MovieTexture:
132 : #ifndef GPAC_DISABLE_X3D
133 : case TAG_X3D_Background:
134 : case TAG_X3D_ImageTexture:
135 : case TAG_X3D_MovieTexture:
136 : #endif
137 : obj_type = GF_MEDIA_OBJECT_VIDEO;
138 403 : break;
139 0 : case TAG_MPEG4_Inline:
140 : #ifndef GPAC_DISABLE_X3D
141 : case TAG_X3D_Inline:
142 : #endif
143 : obj_type = GF_MEDIA_OBJECT_SCENE;
144 0 : break;
145 : #endif /*GPAC_DISABLE_VRML*/
146 :
147 : /*SVG*/
148 : #ifndef GPAC_DISABLE_SVG
149 2 : case TAG_SVG_audio:
150 : obj_type = GF_MEDIA_OBJECT_AUDIO;
151 2 : if (gf_node_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, GF_FALSE, GF_FALSE, &info)==GF_OK) {
152 0 : syncRef = get_sync_reference(scene, (XMLRI *)info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
153 : /*syncRef is specified but doesn't exist yet, post-pone*/
154 0 : if (post_pone) return NULL;
155 : }
156 : break;
157 3 : case TAG_SVG_video:
158 : obj_type = GF_MEDIA_OBJECT_VIDEO;
159 3 : if (gf_node_get_attribute_by_tag(node, TAG_SVG_ATT_syncReference, GF_FALSE, GF_FALSE, &info)==GF_OK) {
160 2 : syncRef = get_sync_reference(scene, (XMLRI *)info.far_ptr, GF_MEDIA_OBJECT_UNDEF, node, &post_pone);
161 : /*syncRef is specified but doesn't exist yet, post-pone*/
162 2 : if (post_pone) return NULL;
163 : }
164 : break;
165 3 : case TAG_SVG_image:
166 : obj_type = GF_MEDIA_OBJECT_VIDEO;
167 3 : break;
168 0 : case TAG_SVG_foreignObject:
169 : case TAG_SVG_animation:
170 : obj_type = GF_MEDIA_OBJECT_SCENE;
171 0 : break;
172 0 : case TAG_LSR_updates:
173 : obj_type = GF_MEDIA_OBJECT_UPDATES;
174 0 : break;
175 : #endif
176 :
177 107 : default:
178 : obj_type = GF_MEDIA_OBJECT_UNDEF;
179 107 : break;
180 : }
181 :
182 : /*move to primary resource handler*/
183 1293 : while (scene->secondary_resource && scene->root_od->parentscene)
184 : scene = scene->root_od->parentscene;
185 :
186 1293 : res = gf_scene_get_media_object_ex(scene, url, obj_type, lock_timelines, syncRef, force_new_res, node);
187 1293 : return res;
188 : }
189 :
190 : GF_EXPORT
191 446 : void gf_mo_unregister(GF_Node *node, GF_MediaObject *mo)
192 : {
193 446 : if (mo && node) {
194 368 : gf_mo_event_target_remove_by_node(mo, node);
195 : }
196 446 : }
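/* Editor's note: illustrative sketch, not part of the original source. It shows the usual
 * pairing of gf_mo_register() / gf_mo_unregister() from a compositor node stack. The node
 * pointer and the "media.mp4" URL are hypothetical placeholders; the MFURL is built on the
 * stack, mirroring the pattern used by get_sync_reference() above. */
static GF_MediaObject *sketch_bind_node_to_media(GF_Node *node)
{
	MFURL mfurl;
	SFURL sfurl;
	sfurl.OD_ID = 0;          /* 0 since this is not an OD-scheme (od:ID) URL */
	sfurl.url = "media.mp4";  /* hypothetical resource */
	mfurl.count = 1;
	mfurl.vals = &sfurl;
	/* no timeline locking, no forced new resource */
	return gf_mo_register(node, &mfurl, GF_FALSE, GF_FALSE);
}

/* when the node is destroyed, drop the association */
static void sketch_unbind_node_from_media(GF_Node *node, GF_MediaObject *mo)
{
	gf_mo_unregister(node, mo);
}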
197 :
198 496 : GF_MediaObject *gf_mo_new()
199 : {
200 : GF_MediaObject *mo;
201 496 : mo = (GF_MediaObject *) gf_malloc(sizeof(GF_MediaObject));
202 : memset(mo, 0, sizeof(GF_MediaObject));
203 496 : mo->speed = FIX_ONE;
204 496 : mo->URLs.count = 0;
205 496 : mo->URLs.vals = NULL;
206 496 : mo->evt_targets = gf_list_new();
207 496 : return mo;
208 : }
209 :
210 : GF_EXPORT
211 1821 : Bool gf_mo_get_visual_info(GF_MediaObject *mo, u32 *width, u32 *height, u32 *stride, u32 *pixel_ar, u32 *pixelFormat, Bool *is_flipped)
212 : {
213 1821 : if ((mo->type != GF_MEDIA_OBJECT_VIDEO) && (mo->type!=GF_MEDIA_OBJECT_TEXT)) return GF_FALSE;
214 :
215 1821 : if (mo->config_changed) {
216 410 : gf_mo_update_caps(mo);
217 : }
218 1821 : if (width) *width = mo->width;
219 1821 : if (height) *height = mo->height;
220 1821 : if (stride) *stride = mo->stride;
221 1821 : if (pixel_ar) *pixel_ar = mo->pixel_ar;
222 1821 : if (pixelFormat) *pixelFormat = mo->pixelformat;
223 1821 : if (is_flipped) *is_flipped = mo->is_flipped;
224 : return GF_TRUE;
225 : }
226 :
227 : GF_EXPORT
228 24035 : void gf_mo_get_nb_views(GF_MediaObject *mo, u32 *nb_views)
229 : {
230 24035 : if (mo) *nb_views = mo->nb_views;
231 24035 : }
232 :
233 : GF_EXPORT
234 :
235 10483 : void gf_mo_get_nb_layers(GF_MediaObject *mo, u32 *nb_layers)
236 : {
237 10483 : if (mo) *nb_layers = mo->nb_layers;
238 10483 : }
239 :
240 : GF_EXPORT
241 77 : Bool gf_mo_get_audio_info(GF_MediaObject *mo, u32 *sample_rate, u32 *bits_per_sample, u32 *num_channels, u64 *channel_config, Bool *forced_layout)
242 : {
243 77 : if (!mo->odm || (mo->type != GF_MEDIA_OBJECT_AUDIO)) return GF_FALSE;
244 :
245 69 : if (mo->odm->pid && (!mo->sample_rate || !mo->num_channels))
246 0 : gf_filter_pid_get_packet(mo->odm->pid);
247 :
248 69 : if (mo->config_changed) {
249 68 : gf_mo_update_caps(mo);
250 : }
251 :
252 69 : if (sample_rate) *sample_rate = mo->sample_rate;
253 69 : if (bits_per_sample) *bits_per_sample = mo->afmt;
254 69 : if (num_channels) *num_channels = mo->num_channels;
255 69 : if (channel_config) *channel_config = mo->channel_config;
256 69 : if (forced_layout) *forced_layout = GF_FALSE;
257 :
258 69 : if (mo->odm->ambi_ch_id) {
259 0 : if (mo->num_channels>1) {
260 0 : GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM%d]: tagged as ambisonic channel %d but has %d channels, ignoring ambisonic tag\n", mo->odm->ID, mo->odm->ambi_ch_id, mo->num_channels ));
261 : } else {
262 0 : if (num_channels) *num_channels = 1;
263 0 : if (channel_config) *channel_config = (u64) ( 1 << (mo->odm->ambi_ch_id - 1) );
264 0 : if (forced_layout) *forced_layout = GF_TRUE;
265 :
266 : }
267 : }
268 :
269 : return GF_TRUE;
270 : }
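/* Editor's note: illustrative sketch, not part of the original source. Renderers typically
 * query the current object configuration before drawing or mixing; both getters below return
 * GF_FALSE when the object is not of the expected type. The two objects passed in are assumed
 * valid. */
static void sketch_query_caps(GF_MediaObject *audio_mo, GF_MediaObject *video_mo)
{
	u32 sr, bps, nb_ch, w, h, stride, par, pixfmt;
	u64 ch_layout;
	Bool forced_layout, flipped;

	if (gf_mo_get_audio_info(audio_mo, &sr, &bps, &nb_ch, &ch_layout, &forced_layout)) {
		/* configure the mixer input from sr / nb_ch / ch_layout */
	}
	if (gf_mo_get_visual_info(video_mo, &w, &h, &stride, &par, &pixfmt, &flipped)) {
		/* (re)allocate the texture for w x h, honouring stride and pixfmt */
	}
}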
271 :
272 :
273 947 : void gf_mo_update_caps(GF_MediaObject *mo)
274 : {
275 : Bool changed = GF_FALSE;
276 : const GF_PropertyValue *v, *v2;
277 947 : if (!mo->odm || !mo->odm->pid) return;
278 :
279 879 : mo->planar_audio = GF_FALSE;
280 :
281 : #define UPDATE_CAP(_code, _field) \
282 : v = gf_filter_pid_get_property(mo->odm->pid, _code);\
283 : if (v) {\
284 : if (mo->_field && (mo->_field != v->value.uint)) changed=GF_TRUE;\
285 : mo->_field = v->value.uint;\
286 : }\
287 :
288 879 : if (mo->odm->type==GF_STREAM_VISUAL) {
289 :
290 784 : UPDATE_CAP(GF_PROP_PID_WIDTH, width)
291 784 : UPDATE_CAP(GF_PROP_PID_HEIGHT, height)
292 784 : UPDATE_CAP(GF_PROP_PID_STRIDE, stride)
293 784 : UPDATE_CAP(GF_PROP_PID_PIXFMT, pixelformat)
294 784 : UPDATE_CAP(GF_PROP_PID_BITRATE, bitrate)
295 :
296 784 : v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SAR);
297 784 : if (v) {
298 21 : u32 n_par = (v->value.frac.num) << 16 | (v->value.frac.den);
299 21 : if (mo->pixel_ar && (mo->pixel_ar!=n_par)) changed = GF_TRUE;
300 21 : mo->pixel_ar = n_par;
301 : }
302 :
303 784 : v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SRD);
304 784 : v2 = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_SRD_REF);
305 784 : if (v && v->value.vec4i.w && v->value.vec4i.z) {
306 0 : mo->srd_x = v->value.vec4i.x;
307 0 : mo->srd_y = v->value.vec4i.y;
308 0 : mo->srd_w = v->value.vec4i.z;
309 0 : mo->srd_h = v->value.vec4i.w;
310 0 : if (v2) {
311 0 : mo->srd_full_w = v2->value.vec2i.x;
312 0 : mo->srd_full_h = v2->value.vec2i.y;
313 : }
314 :
315 0 : if (mo->odm->parentscene->is_dynamic_scene) {
316 0 : u32 old_type = mo->odm->parentscene->srd_type;
317 0 : if ((mo->srd_w == mo->srd_full_w) && (mo->srd_h == mo->srd_full_h)) {
318 0 : mo->odm->parentscene->srd_type = 2;
319 0 : } else if (!mo->odm->parentscene->srd_type) {
320 0 : mo->odm->parentscene->srd_type = 1;
321 : }
322 0 : if (old_type != mo->odm->parentscene->srd_type) {
323 : //reset scene graph but prevent object stop/start
324 0 : u32 i, count = gf_list_count(mo->odm->parentscene->scene_objects);
325 0 : for (i=0; i<count; i++) {
326 0 : GF_MediaObject *an_mo = gf_list_get(mo->odm->parentscene->scene_objects, i);
327 0 : an_mo->num_open++;
328 : }
329 0 : gf_sg_reset(mo->odm->parentscene->graph);
330 0 : for (i=0; i<count; i++) {
331 0 : GF_MediaObject *an_mo = gf_list_get(mo->odm->parentscene->scene_objects, i);
332 0 : an_mo->num_open--;
333 : }
334 0 : gf_scene_regenerate(mo->odm->parentscene);
335 : }
336 : }
337 : }
338 : // SRD object with no size but global scene size: HEVC tile-based object
339 784 : else if (v2 && v2->value.vec2i.x && v2->value.vec2i.y) {
340 6 : if (mo->odm->parentscene->is_dynamic_scene && !mo->odm->parentscene->srd_type) {
341 6 : mo->odm->parentscene->is_tiled_srd = GF_TRUE;
342 6 : mo->srd_full_w = v2->value.vec2i.x;
343 6 : mo->srd_full_h = v2->value.vec2i.y;
344 : }
345 : }
346 95 : } else if (mo->odm->type==GF_STREAM_AUDIO) {
347 39 : UPDATE_CAP(GF_PROP_PID_SAMPLE_RATE, sample_rate)
348 39 : UPDATE_CAP(GF_PROP_PID_NUM_CHANNELS, num_channels)
349 39 : UPDATE_CAP(GF_PROP_PID_AUDIO_FORMAT, afmt)
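	/* note (added comment): UPDATE_CAP expands to "v = gf_filter_pid_get_property(...); if (v) {...}",
	   so the else below binds to that if and defaults to 16-bit PCM when the pid carries no
	   GF_PROP_PID_AUDIO_FORMAT property */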
350 0 : else mo->afmt = GF_AUDIO_FMT_S16;
351 :
352 39 : v = gf_filter_pid_get_property(mo->odm->pid, GF_PROP_PID_CHANNEL_LAYOUT);
353 39 : if (v) {
354 39 : if (mo->channel_config && (mo->channel_config!=v->value.longuint)) changed = GF_TRUE;
355 39 : mo->channel_config = v->value.longuint;
356 : }
357 :
358 39 : mo->bytes_per_sec = gf_audio_fmt_bit_depth(mo->afmt) * mo->num_channels * mo->sample_rate / 8;
359 39 : mo->planar_audio = gf_audio_fmt_is_planar(mo->afmt);
360 56 : } else if (mo->odm->type==GF_STREAM_OD) {
361 : //nothing to do
362 56 : } else if (mo->odm->type==GF_STREAM_OCR) {
363 : //nothing to do
364 56 : } else if (mo->odm->type==GF_STREAM_SCENE) {
365 : //nothing to do
366 10 : } else if (mo->odm->type==GF_STREAM_TEXT) {
367 : //nothing to do
368 : } else {
369 0 : GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("Unknown scene object type %d\n", mo->odm->type));
370 : }
371 :
372 823 : if (changed) {
373 : GF_Event evt;
374 2 : GF_Scene *scene = mo->odm->subscene ? mo->odm->subscene : mo->odm->parentscene;
375 : memset(&evt, 0, sizeof(GF_Event));
376 2 : evt.type = GF_EVENT_QUALITY_SWITCHED;
377 2 : gf_sc_send_event(scene->compositor, &evt);
378 : }
379 : }
380 :
381 : static u64 convert_ts_to_ms(GF_MediaObject *mo, u64 ts, u32 timescale, Bool *discard)
382 : {
383 40208 : if (mo->odm->timestamp_offset) {
384 786 : if (mo->odm->timestamp_offset >= 0) {
385 0 : ts += mo->odm->timestamp_offset;
386 786 : } else if (ts < (u64) -mo->odm->timestamp_offset) {
387 : *discard = GF_TRUE;
388 : return 0;
389 : } else {
390 786 : ts -= -mo->odm->timestamp_offset;
391 : }
392 : }
393 40208 : ts *= 1000;
394 40208 : ts /= timescale;
395 : return ts;
396 : }
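/* Editor's illustration, not part of the original source: with a 90 kHz timescale, a CTS of
   450000 maps to 450000 * 1000 / 90000 = 5000 ms. When odm->timestamp_offset is negative and
   larger in magnitude than the timestamp, the packet is flagged through *discard rather than
   producing a negative time. */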
397 :
398 : GF_EXPORT
399 47973 : u8 *gf_mo_fetch_data(GF_MediaObject *mo, GF_MOFetchMode resync, u32 upload_time_ms, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, s32 *ms_until_next, GF_FilterFrameInterface **outFrame, u32 *planar_size)
400 : {
401 : Bool discard=GF_FALSE;
402 : u32 force_decode_mode = 0;
403 : u32 obj_time, obj_time_orig;
404 : s64 diff;
405 : Bool skip_resync;
406 : u32 timescale=0;
407 47973 : u64 pck_ts=0, next_ts=0;
408 : u32 retry_pull;
409 : Bool is_first = GF_FALSE;
410 : Bool move_to_next_only = GF_FALSE;
411 :
412 47973 : *eos = GF_FALSE;
413 47973 : *timestamp = mo->timestamp;
414 47973 : *size = mo->framesize;
415 47973 : if (ms_until_pres) *ms_until_pres = mo->ms_until_pres;
416 47973 : if (ms_until_next) *ms_until_next = mo->ms_until_next;
417 47973 : if (outFrame) *outFrame = NULL;
418 :
419 47973 : if (!mo->odm || !mo->odm->pid)
420 : return NULL;
421 :
422 : /*if frame locked return it*/
423 46436 : if (mo->nb_fetch) {
424 494 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] ODM %d: CU already fetched, returning\n", mo->odm->ID));
425 494 : mo->nb_fetch ++;
426 494 : if (planar_size) *planar_size = mo->framesize / mo->num_channels;
427 494 : return mo->frame;
428 : }
429 :
430 45942 : if (mo->pck && mo->frame_ifce && (mo->frame_ifce->flags & GF_FRAME_IFCE_BLOCKING) ) {
431 0 : gf_filter_pck_unref(mo->pck);
432 0 : mo->pck = NULL;
433 : }
434 :
435 45942 : if ( gf_odm_check_buffering(mo->odm, NULL) ) {
436 : //special flag set for tiles only, return NULL until we are done buffering
437 5543 : if (mo->odm->flags & GF_ODM_TILED_SHARED_CLOCK) {
438 : return NULL;
439 : }
440 5543 : if (mo->type==GF_MEDIA_OBJECT_AUDIO)
441 : return NULL;
442 : //if buffering, first frame fetched and still buffering return last frame
443 1779 : if (mo->first_frame_fetched && mo->odm->nb_buffering) {
444 374 : return mo->frame_ifce ? (u8 *) mo->frame_ifce : mo->frame;
445 : }
446 : }
447 :
448 41430 : retry:
449 : discard = GF_FALSE;
450 41430 : if (!mo->pck) {
451 31185 : mo->pck = gf_filter_pid_get_packet(mo->odm->pid);
452 31185 : if (!mo->pck) {
453 25174 : if (gf_filter_pid_is_eos(mo->odm->pid)) {
454 5374 : if (!mo->is_eos) {
455 111 : mo->is_eos = GF_TRUE;
456 111 : mediasensor_update_timing(mo->odm, GF_TRUE);
457 111 : gf_odm_on_eos(mo->odm, mo->odm->pid);
458 111 : gf_odm_signal_eos_reached(mo->odm);
459 : }
460 : } else {
461 19800 : mo->odm->ck->has_seen_eos = GF_FALSE;
462 : }
463 25174 : *eos = mo->is_eos;
464 25174 : return NULL;
465 : } else {
466 6011 : gf_filter_pck_ref(&mo->pck);
467 6011 : gf_filter_pid_drop_packet(mo->odm->pid);
468 : }
469 : is_first = GF_TRUE;
470 : }
471 : assert(mo->pck);
472 16256 : mo->first_frame_fetched = GF_TRUE;
473 16256 : *eos = mo->is_eos = GF_FALSE;
474 :
475 : /*not running and no resync (ie audio)*/
476 16256 : if (!gf_clock_is_started(mo->odm->ck)) {
477 499 : if (!resync) {
478 0 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] ODM %d: CB not running, returning\n", mo->odm->ID));
479 : return NULL;
480 499 : } else if (mo->odm->ck->nb_buffering && mo->odm->type==GF_STREAM_AUDIO) {
481 : return NULL;
482 : }
483 : }
484 :
485 16256 : /*data = */gf_filter_pck_get_data(mo->pck, size);
486 16256 : timescale = gf_filter_pck_get_timescale(mo->pck);
487 :
488 16256 : pck_ts = convert_ts_to_ms(mo, gf_filter_pck_get_cts(mo->pck), timescale, &discard);
489 : if (discard) {
490 0 : gf_filter_pck_unref(mo->pck);
491 0 : mo->pck = NULL;
492 0 : goto retry;
493 : }
494 :
495 16256 : if (resync==GF_MO_FETCH_PAUSED)
496 : resync=GF_MO_FETCH;
497 :
498 : retry_pull = 1;
499 : /*fast forward, bench mode with composition memory: force one decode if no data is available*/
500 16256 : if (! *eos && ((mo->odm->ck->speed > FIX_ONE) || mo->odm->parentscene->compositor->bench_mode || (mo->odm->type==GF_STREAM_AUDIO) ) ) {
501 : retry_pull = 10;
502 : force_decode_mode=1;
503 : }
504 :
505 22125 : while (retry_pull) {
506 22125 : retry_pull--;
507 22125 : next_ts = 0;
508 22125 : if (gf_filter_pid_get_first_packet_cts(mo->odm->pid, &next_ts) ) {
509 30326 : next_ts = 1 + convert_ts_to_ms(mo, next_ts, timescale, &discard);
510 15163 : break;
511 : } else {
512 6962 : if (gf_filter_pid_is_eos(mo->odm->pid)) {
513 516 : if (!mo->is_eos) {
514 516 : *eos = mo->is_eos = GF_TRUE;
515 516 : mediasensor_update_timing(mo->odm, GF_TRUE);
516 516 : gf_odm_on_eos(mo->odm, mo->odm->pid);
517 : force_decode_mode=0;
518 : }
519 : break;
520 : }
521 : }
522 6446 : *eos = mo->is_eos;
523 6446 : if (!retry_pull) break;
524 :
525 5869 : gf_filter_pid_try_pull(mo->odm->pid);
526 : }
527 16256 : if (!retry_pull && (force_decode_mode==1)) {
528 383 : GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] At %d could not force a pull from pid - POTENTIAL blank frame after TS %u\n", mo->odm->ID, gf_clock_time(mo->odm->ck), mo->timestamp));
529 : }
530 :
531 : /*resync*/
532 16256 : obj_time = obj_time_orig = gf_clock_time(mo->odm->ck);
533 :
534 16256 : if (mo->odm->prev_clock_at_discontinuity_plus_one) {
535 : s32 diff_new, diff_old, diff_pck_old, diff_pck_new;
536 0 : s32 old_timebase_time = (s32) obj_time;
537 0 : old_timebase_time -= (s32) mo->odm->ck->init_timestamp;
538 0 : old_timebase_time += (s32) mo->odm->prev_clock_at_discontinuity_plus_one;
539 : diff_new = (s32) obj_time;
540 0 : diff_new -= mo->last_fetch_time;
541 0 : if (diff_new < 0) diff_new = -diff_new;
542 : diff_old = (s32) old_timebase_time;
543 0 : diff_old -= mo->last_fetch_time;
544 0 : if (diff_old < 0) diff_old = -diff_old;
545 :
546 0 : diff_pck_old = (s32) pck_ts - (s32) old_timebase_time;
547 0 : diff_pck_new = (s32) pck_ts - (s32) obj_time;
548 0 : if (ABS(diff_pck_old) > ABS(diff_pck_new)) {
549 : //don't reset discontinuity flag for audio
550 0 : if (resync>GF_MO_FETCH) {
551 0 : GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] end of clock discontinuity: diff pck TS to old clock %d to new clock %d\n", mo->odm->ID, diff_pck_old, diff_pck_new));
552 0 : mo->odm->prev_clock_at_discontinuity_plus_one = 0;
553 : }
554 0 : } else if (diff_old < diff_new) {
555 0 : GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] in clock discontinuity: time since fetch old clock %d new clock %d\n", mo->odm->ID, diff_old, diff_new));
556 :
557 : obj_time = old_timebase_time;
558 : }
559 : }
560 :
561 16256 : skip_resync = mo->odm->parentscene->compositor->bench_mode ? GF_TRUE : GF_FALSE;
562 : //no drop mode, only for speed = 1: all frames are presented, we discard the current output only if already presented and next frame time is mature
563 16256 : if ((mo->odm->ck->speed == FIX_ONE)
564 16109 : && (mo->type==GF_MEDIA_OBJECT_VIDEO)
565 : //if no buffer playout we are in a low-latency configuration, don't skip resync
566 5075 : && mo->odm->buffer_playout_ms
567 : ) {
568 : assert(mo->odm->parentscene);
569 5075 : if (! mo->odm->parentscene->compositor->drop) {
570 5075 : if (mo->odm->parentscene->compositor->force_late_frame_draw) {
571 0 : mo->flags |= GF_MO_IN_RESYNC;
572 : }
573 5075 : else if (mo->flags & GF_MO_IN_RESYNC) {
574 41 : if (next_ts >= 1 + obj_time) {
575 : skip_resync = GF_TRUE;
576 0 : mo->flags &= ~GF_MO_IN_RESYNC;
577 : }
578 : }
579 5034 : else if (next_ts && (next_ts < pck_ts) ) {
580 : skip_resync = GF_TRUE;
581 : }
582 : //if the next AU is at most 300 ms from the current clock use no drop mode
583 5034 : else if (next_ts + 300 >= obj_time) {
584 : skip_resync = GF_TRUE;
585 17 : } else if (next_ts) {
586 2 : GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[ODM%d] At %u frame TS %u next frame TS %d too late in no-drop mode, enabling drop - resync mode %d\n", mo->odm->ID, obj_time, pck_ts, next_ts, resync));
587 2 : mo->flags |= GF_MO_IN_RESYNC;
588 : }
589 : }
590 : }
591 :
592 11239 : if (skip_resync) {
593 : resync=GF_MO_FETCH; //prevent resync code below
594 5017 : if (mo->odm->parentscene->compositor->use_step_mode) upload_time_ms=0;
595 :
596 : //we are in no resync mode, drop current frame once played and object time just matured
597 : //do it only if clock is started or if compositor step mode is set
598 : //the time threshold for fetching is given by the caller
599 5017 : if ( (gf_clock_is_started(mo->odm->ck) || mo->odm->parentscene->compositor->use_step_mode)
600 4915 : && (mo->timestamp==pck_ts) && next_ts && ( (next_ts <= 1 + obj_time + upload_time_ms) || (next_ts <= 1 + obj_time_orig + upload_time_ms) ) )
601 : {
602 : //drop current and go to next - we use the same loop as regular resync below
603 : resync = GF_MO_FETCH_RESYNC;
604 : move_to_next_only = GF_TRUE;
605 4229 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Switching to CU CTS %u (next %d) now %u\n", mo->odm->ID, pck_ts, next_ts, obj_time));
606 : }
607 : }
608 11239 : if (resync!=GF_MO_FETCH) {
609 : u32 nb_dropped = 0;
610 4574 : while (next_ts) {
611 4458 : if (!move_to_next_only) {
612 229 : if (mo->odm->ck->speed > 0 ? pck_ts >= obj_time : pck_ts <= obj_time )
613 : break;
614 :
615 228 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] Try to drop frame TS %u next frame TS %u obj time %u\n", mo->odm->ID, pck_ts, next_ts, obj_time));
616 :
617 : //nothing ready yet
618 228 : if ( gf_filter_pid_first_packet_is_empty(mo->odm->pid) ) {
619 : break;
620 : }
621 :
622 : /*figure out closest time*/
623 228 : if (mo->odm->ck->speed > 0 ? next_ts > obj_time : next_ts < obj_time) {
624 12 : *eos = GF_FALSE;
625 12 : break;
626 : }
627 :
628 216 : nb_dropped ++;
629 216 : if (nb_dropped>=1) {
630 216 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u dropped frame TS %u\n", mo->odm->ID, obj_time, pck_ts));
631 :
632 216 : mo->odm->nb_dropped++;
633 : }
634 : }
635 :
636 : //delete our packet
637 4445 : gf_filter_pck_unref(mo->pck);
638 4445 : mo->pck = gf_filter_pid_get_packet(mo->odm->pid);
639 : assert(mo->pck);
640 4445 : gf_filter_pck_ref( &mo->pck);
641 :
642 4445 : pck_ts = convert_ts_to_ms(mo, gf_filter_pck_get_cts(mo->pck), timescale, &discard);
643 : //drop next packet from pid
644 4445 : gf_filter_pid_drop_packet(mo->odm->pid);
645 :
646 4445 : if (obj_time != obj_time_orig) {
647 0 : s32 diff_pck_old = (s32) pck_ts - (s32) obj_time;
648 0 : s32 diff_pck_new = (s32) pck_ts - (s32) obj_time_orig;
649 :
650 0 : if (ABS(diff_pck_old) > ABS(diff_pck_new)) {
651 0 : GF_LOG(GF_LOG_INFO, GF_LOG_SYNC, ("[ODM%d] end of clock discontinuity, moving from old time base %d to new %d\n", mo->odm->ID, obj_time, obj_time_orig));
652 : obj_time = obj_time_orig;
653 0 : mo->odm->prev_clock_at_discontinuity_plus_one = 0;
654 : }
655 : }
656 :
657 4445 : next_ts = 0;
658 4445 : if (gf_filter_pid_get_first_packet_cts(mo->odm->pid, &next_ts)) {
659 8688 : next_ts = convert_ts_to_ms(mo, next_ts, timescale, &discard);
660 : }
661 4445 : if (move_to_next_only)
662 : break;
663 : }
664 : }
665 :
666 :
667 16256 : mo->frame = (char *) gf_filter_pck_get_data(mo->pck, &mo->size);
668 16256 : mo->framesize = mo->size - mo->RenderedLength;
669 :
670 : //planar mode: RenderedLength corresponds to all channels, so move the frame pointer
671 : //to the first non-consumed sample = RenderedLength/nb_channels
672 16256 : if (mo->planar_audio) {
673 3631 : mo->frame += mo->RenderedLength / mo->num_channels;
674 : } else {
675 12625 : mo->frame += mo->RenderedLength;
676 : }
677 16256 : mo->frame_ifce = gf_filter_pck_get_frame_interface(mo->pck);
678 : // mo->media_frame = CU->frame;
679 :
680 16256 : diff = (s32) ( (mo->speed >= 0) ? ( (s64) pck_ts - (s64) obj_time) : ( (s64) obj_time - (s64) pck_ts) );
681 16256 : mo->ms_until_pres = FIX2INT(diff * mo->speed);
682 :
683 16256 : if (mo->is_eos) {
684 516 : diff = 1000*gf_filter_pck_get_duration(mo->pck) / timescale;
685 516 : if (!diff) diff = 100;
686 : } else {
687 15740 : diff = next_ts ? next_ts : (pck_ts + 1000*gf_filter_pck_get_duration(mo->pck) / timescale);
688 15740 : diff = (s32) ( (mo->speed >= 0) ? ( (s64) diff - (s64) obj_time) : ( (s64) obj_time - (s64) diff) );
689 :
690 15740 : mo->odm->ck->has_seen_eos = GF_FALSE;
691 : }
692 16256 : mo->ms_until_next = FIX2INT(diff * mo->speed);
693 16256 : if (mo->ms_until_next < 0)
694 2983 : mo->ms_until_next = 0;
695 :
696 : //safe guard
697 16256 : if (mo->ms_until_next>500)
698 7063 : mo->ms_until_next=500;
699 :
700 26603 : if ((mo->timestamp != pck_ts) || is_first) {
701 : const GF_PropertyValue *v;
702 : u32 media_time;
703 10347 : u64 dur = gf_filter_pck_get_duration(mo->pck);
704 10347 : dur *= 1000;
705 10347 : dur /= timescale;
706 10347 : mo->frame_dur = (u32) dur;
707 10347 : mo->last_fetch_time = obj_time;
708 :
709 10347 : mo->timestamp = (u32) pck_ts;
710 10347 : media_time = gf_clock_to_media_time(mo->odm->ck, mo->timestamp);
711 :
712 10347 : if (mo->odm->media_current_time <= media_time)
713 10347 : mo->odm->media_current_time = media_time;
714 :
715 10347 : if (mo->odm->parentscene->is_dynamic_scene) {
716 : GF_Scene *s = mo->odm->parentscene;
717 302 : while (s && s->root_od->addon) {
718 0 : s = s->root_od->parentscene;
719 : }
720 302 : if (s && (s->root_od->media_current_time < mo->odm->media_current_time) )
721 127 : s->root_od->media_current_time = mo->odm->media_current_time;
722 : }
723 :
724 : #ifndef GPAC_DISABLE_VRML
725 10347 : if (! *eos )
726 10157 : mediasensor_update_timing(mo->odm, GF_FALSE);
727 : #endif
728 :
729 10347 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d (%s)] At OTB %u fetch frame TS %u size %d (previous TS %u) - %d unit in CB - UTC "LLU" ms - %d ms until CTS is due - %d ms until next frame\n", mo->odm->ID, mo->odm->scene_ns->url, gf_clock_time(mo->odm->ck), pck_ts, mo->framesize, mo->timestamp, gf_filter_pid_get_packet_count(mo->odm->pid), gf_net_get_utc(), mo->ms_until_pres, mo->ms_until_next ));
730 :
731 10347 : v = gf_filter_pck_get_property(mo->pck, GF_PROP_PCK_SENDER_NTP);
732 10347 : if (v) {
733 0 : GF_PropertyEntry *pe = NULL;
734 :
735 0 : mo->odm->last_drawn_frame_ntp_sender = v->value.longuint;
736 :
737 0 : v = gf_filter_pck_get_property(mo->pck, GF_PROP_PCK_RECEIVER_NTP);
738 0 : if (v) {
739 0 : mo->odm->last_drawn_frame_ntp_receive = v->value.longuint;
740 : }
741 :
742 0 : mo->odm->last_drawn_frame_ntp_diff = gf_net_get_ntp_diff_ms(mo->odm->last_drawn_frame_ntp_sender);
743 0 : v = gf_filter_pid_get_info_str(mo->odm->pid, "ntpdiff", &pe);
744 0 : if (v) {
745 0 : mo->odm->last_drawn_frame_ntp_diff -= v->value.sint;
746 : }
747 0 : gf_filter_release_property(pe);
748 0 : GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d (%s)] Frame TS %u NTP diff with sender %d ms\n", mo->odm->ID, mo->odm->scene_ns->url, pck_ts, mo->odm->last_drawn_frame_ntp_diff));
749 :
750 0 : if (mo->odm->parentscene->compositor->ntpsync
751 0 : && (mo->odm->last_drawn_frame_ntp_diff > (s32) mo->odm->parentscene->compositor->ntpsync)
752 : // && first_ntp
753 : ) {
754 : // first_ntp = GF_FALSE;
755 0 : u32 ntp_diff = mo->odm->last_drawn_frame_ntp_diff - mo->odm->parentscene->compositor->ntpsync;
756 0 : mo->odm->ck->init_timestamp += ntp_diff;
757 0 : mo->flags |= GF_MO_IN_RESYNC;
758 : }
759 : }
760 :
761 : /*signal EOS after rendering last frame, not while rendering it*/
762 10347 : *eos = GF_FALSE;
763 :
764 5909 : } else if (*eos) {
765 : //already rendered the last frame, consider we no longer have pending late frame on this stream
766 326 : mo->ms_until_pres = 0;
767 : } else {
768 : // GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d (%s)] At OTB %u same frame fetch TS %u\n", mo->odm->ID, mo->odm->net_service->url, obj_time, CU->TS ));
769 :
770 : //if paused force a high value for next frame
771 5583 : if (!gf_clock_is_started(mo->odm->ck)) {
772 280 : mo->ms_until_next = 100;
773 : }
774 : }
775 :
776 : /*also adjust CU time based on consumed bytes in input, since some codecs output very large audio chunks*/
777 16256 : if (mo->bytes_per_sec) mo->timestamp += mo->RenderedLength * 1000 / mo->bytes_per_sec;
778 :
779 16256 : if (mo->odm->parentscene->compositor->bench_mode) {
780 0 : mo->ms_until_pres = -1;
781 0 : mo->ms_until_next = 1;
782 : }
783 :
784 : //TODO fixme, hack for clock signaling
785 16256 : if (!mo->frame && !mo->frame_ifce)
786 : return NULL;
787 :
788 16256 : mo->nb_fetch ++;
789 16256 : *timestamp = mo->timestamp;
790 16256 : *size = mo->framesize;
791 16256 : if (ms_until_pres) *ms_until_pres = mo->ms_until_pres;
792 16256 : if (ms_until_next) *ms_until_next = mo->ms_until_next;
793 16256 : if (outFrame) *outFrame = mo->frame_ifce;
794 16256 : if (planar_size) *planar_size = mo->framesize / mo->num_channels;
795 :
796 : // gf_odm_service_media_event(mo->odm, GF_EVENT_MEDIA_TIME_UPDATE);
797 :
798 16256 : if (mo->frame_ifce)
799 : return (u8 *) mo->frame_ifce;
800 :
801 16256 : return mo->frame;
802 : }
803 :
804 :
805 : GF_EXPORT
806 17124 : void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 drop_mode)
807 : {
808 17124 : if (!mo || !mo->odm || !mo->odm->pid || !mo->nb_fetch) return;
809 :
810 16750 : mo->nb_fetch--;
811 16750 : if (mo->nb_fetch) {
812 : return;
813 : }
814 :
815 16256 : if (nb_bytes==0xFFFFFFFF) {
816 5146 : mo->RenderedLength = mo->size;
817 : } else {
818 : assert(mo->RenderedLength + nb_bytes <= mo->size);
819 11110 : mo->RenderedLength += nb_bytes;
820 : }
821 :
822 16256 : if (drop_mode<0) {
823 : /*only allow for explicit last frame keeping if only one node is using the resource
824 : otherwise this would block the composition memory*/
825 1 : if (mo->num_open>1) {
826 : drop_mode=0;
827 : } else {
828 : return;
829 : }
830 : }
831 :
832 : /*discard frame*/
833 16255 : if (mo->RenderedLength >= mo->size) {
834 10772 : mo->RenderedLength = 0;
835 :
836 10772 : if (!mo->pck) return;
837 :
838 10772 : if (drop_mode==3)
839 : drop_mode=0;
840 10627 : else if (gf_filter_pck_is_blocking_ref(mo->pck) )
841 : drop_mode = 1;
842 :
843 10487 : if (drop_mode) {
844 5767 : gf_filter_pck_unref(mo->pck);
845 5767 : mo->pck = NULL;
846 5767 : GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u released frame TS %u\n", mo->odm->ID,gf_clock_time(mo->odm->ck), mo->timestamp));
847 : } else {
848 : /*we cannot drop since we don't know the speed of the playback (which can even be frame by frame)*/
849 : }
850 : }
851 : }
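/* Editor's note: illustrative sketch, not part of the original source. It shows the
 * fetch/release contract implemented above: every non-NULL return of gf_mo_fetch_data()
 * must be balanced by gf_mo_release_data(); 0xFFFFFFFF marks the whole frame as consumed
 * and a positive drop_mode lets the packet be dropped once fully rendered. */
static void sketch_render_one_frame(GF_MediaObject *mo)
{
	Bool eos;
	u32 ts, size;
	s32 ms_until_pres, ms_until_next;
	GF_FilterFrameInterface *frame_ifce;
	u8 *data;

	data = gf_mo_fetch_data(mo, GF_MO_FETCH_RESYNC, 0, &eos, &ts, &size,
	                        &ms_until_pres, &ms_until_next, &frame_ifce, NULL);
	if (!data) return; /* nothing decoded yet, or end of stream */

	/* ... upload the frame to GPU or push samples to the mixer ... */

	/* whole frame consumed, allow the packet to be dropped */
	gf_mo_release_data(mo, 0xFFFFFFFF, 1);
}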
852 :
853 : GF_EXPORT
854 24713 : void gf_mo_get_object_time(GF_MediaObject *mo, u32 *obj_time)
855 : {
856 : /*get absolute clock (without drift) for audio*/
857 24713 : if (mo && mo->odm && mo->odm->ck) {
858 7246 : if (mo->odm->type==GF_STREAM_AUDIO)
859 0 : *obj_time = gf_clock_real_time(mo->odm->ck);
860 : else
861 7246 : *obj_time = gf_clock_time(mo->odm->ck);
862 : }
863 : /*unknown / unsupported object*/
864 : else {
865 17467 : *obj_time = 0;
866 : }
867 24713 : }
868 :
869 : GF_EXPORT
870 693 : void gf_mo_play(GF_MediaObject *mo, Double clipBegin, Double clipEnd, Bool can_loop)
871 : {
872 693 : if (!mo) return;
873 :
874 516 : if (!mo->num_open && mo->odm) {
875 479 : mo->is_eos = GF_FALSE;
876 479 : if (mo->odm->state == GF_ODM_STATE_PLAY) {
877 216 : if (mo->odm->flags & GF_ODM_PREFETCH) {
878 216 : mo->odm->flags &= ~GF_ODM_PREFETCH;
879 216 : mo->num_open++;
880 216 : return;
881 : }
882 : }
883 263 : if ( (mo->odm->flags & GF_ODM_NO_TIME_CTRL) || (clipBegin<0) ) {
884 0 : mo->odm->media_start_time = 0;
885 : } else {
886 263 : mo->odm->media_start_time = (u64) (clipBegin*1000);
887 263 : if (mo->odm->duration && (mo->odm->media_start_time > mo->odm->duration)) {
888 1 : if (can_loop) {
889 0 : mo->odm->media_start_time %= mo->odm->duration;
890 : } else {
891 1 : mo->odm->media_start_time = mo->odm->duration;
892 : }
893 : }
894 263 : if (clipEnd>=clipBegin) {
895 0 : mo->odm->media_stop_time = (u64) (clipEnd*1000);
896 0 : if (mo->odm->duration && (mo->odm->media_stop_time >=0) && ((u64) mo->odm->media_stop_time > mo->odm->duration)) {
897 0 : mo->odm->media_stop_time = 0;
898 : }
899 : } else {
900 263 : mo->odm->media_stop_time = 0;
901 : }
902 : }
903 : /*done prefetching*/
904 : assert(! (mo->odm->flags & GF_ODM_PREFETCH) );
905 :
906 263 : gf_odm_start(mo->odm);
907 37 : } else if (mo->odm) {
908 16 : if (mo->num_to_restart) mo->num_restart--;
909 16 : if (!mo->num_restart && (mo->num_to_restart==mo->num_open+1) ) {
910 0 : mediacontrol_restart(mo->odm);
911 0 : mo->num_to_restart = mo->num_restart = 0;
912 : }
913 : }
914 300 : mo->num_open++;
915 : }
916 :
917 : GF_EXPORT
918 457 : void gf_mo_stop(GF_MediaObject **_mo)
919 : {
920 457 : GF_MediaObject *mo = _mo ? *_mo : NULL;
921 457 : if (!mo || !mo->num_open) return;
922 :
923 439 : mo->num_open--;
924 439 : if (!mo->num_open && mo->odm) {
925 412 : mo->first_frame_fetched = GF_FALSE;
926 412 : if (mo->odm->flags & GF_ODM_DESTROYED) {
927 0 : *_mo = NULL;
928 0 : return;
929 : }
930 :
931 412 : if (mo->pck) {
932 236 : gf_filter_pck_unref(mo->pck);
933 236 : mo->pck = NULL;
934 : }
935 :
936 : /*signal STOP request*/
937 412 : if ((mo->OD_ID==GF_MEDIA_EXTERNAL_ID) || (mo->odm && mo->odm->ID && (mo->odm->ID==GF_MEDIA_EXTERNAL_ID))) {
938 61 : gf_odm_disconnect(mo->odm, 2);
939 61 : *_mo = NULL;
940 : } else {
941 351 : if ( gf_odm_stop_or_destroy(mo->odm) ) {
942 0 : *_mo = NULL;
943 : }
944 : }
945 : } else {
946 27 : if (!mo->num_to_restart) {
947 17 : mo->num_restart = mo->num_to_restart = mo->num_open + 1;
948 : }
949 : }
950 : }
951 :
952 : GF_EXPORT
953 10 : void gf_mo_restart(GF_MediaObject *mo)
954 : {
955 : /*if no control and not root of a scene, check timelines are unlocked*/
956 10 : if (!mo->odm->subscene
957 : #ifndef GPAC_DISABLE_VRML
958 6 : && !gf_odm_get_mediacontrol(mo->odm)
959 : #endif
960 : ) {
961 : /*don't restart if sharing parent scene clock*/
962 6 : if (gf_odm_shares_clock(mo->odm, gf_odm_get_media_clock(mo->odm->parentscene->root_od))) {
963 : return;
964 : }
965 : }
966 : /*all other cases, call restart to take into account clock references*/
967 10 : mediacontrol_restart(mo->odm);
968 : }
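/* Editor's note: illustrative sketch, not part of the original source. Playback control is
 * reference counted: gf_mo_play() increments num_open and starts the object on first use,
 * gf_mo_stop() decrements it and issues the stop once the last user is gone; it may clear
 * the pointer when the object gets disconnected. clipBegin/clipEnd are in seconds and, in
 * this sketch, a clipEnd smaller than clipBegin means "no stop time". */
static void sketch_playback_control(GF_MediaObject *mo)
{
	/* play from the beginning, no clip end, no looping */
	gf_mo_play(mo, 0.0, -1.0, GF_FALSE);

	/* ... the object is rendered for a while ... */

	gf_mo_stop(&mo);
	/* mo may now be NULL if the underlying object was disconnected */
}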
969 :
970 8023 : u32 gf_mo_get_od_id(MFURL *url)
971 : {
972 : u32 i, j, tmpid;
973 : char *str, *s_url;
974 : u32 id = 0;
975 :
976 8023 : if (!url) return 0;
977 :
978 2740 : for (i=0; i<url->count; i++) {
979 3308 : if (url->vals[i].OD_ID) {
980 : /*works because OD ID 0 is forbidden in MPEG4*/
981 2673 : if (!id) {
982 : id = url->vals[i].OD_ID;
983 : }
984 : /*bad url, only one object can be described in MPEG4 urls*/
985 0 : else if (id != url->vals[i].OD_ID) return 0;
986 635 : } else if (url->vals[i].url && strlen(url->vals[i].url)) {
987 : /*format: od:ID or od:ID#segment - also check for "ID" in case...*/
988 : str = url->vals[i].url;
989 622 : if (!strnicmp(str, "od:", 3)) str += 3;
990 : /*remove segment info*/
991 622 : s_url = gf_strdup(str);
992 : j = 0;
993 16518 : while (j<strlen(s_url)) {
994 15610 : if (s_url[j]=='#') {
995 336 : s_url[j] = 0;
996 336 : break;
997 : }
998 15274 : j++;
999 : }
1000 622 : j = sscanf(s_url, "%u", &tmpid);
1001 : /*be careful, a URL like "11-regression-test.mp4" will return 1 on sscanf :)*/
1002 622 : if (j==1) {
1003 : char szURL[20];
1004 54 : sprintf(szURL, "%u", tmpid);
1005 54 : if (stricmp(szURL, s_url)) j = 0;
1006 : }
1007 622 : gf_free(s_url);
1008 :
1009 622 : if (j!= 1) {
1010 : /*dynamic OD if only one URL specified*/
1011 568 : if (!i) return GF_MEDIA_EXTERNAL_ID;
1012 : /*otherwise ignore*/
1013 0 : continue;
1014 : }
1015 54 : if (!id) {
1016 46 : id = tmpid;
1017 46 : continue;
1018 : }
1019 : /*bad url, only one object can be described in MPEG4 urls*/
1020 8 : else if (id != tmpid) return 0;
1021 : }
1022 : }
1023 : return id;
1024 : }
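/* Editor's illustration, not part of the original source: an entry carrying OD_ID 12 or a URL
   such as "od:12#seg" both resolve to 12; a single non-numeric URL (e.g. "movie.mp4") resolves
   to GF_MEDIA_EXTERNAL_ID; conflicting IDs across the MFURL entries yield 0. */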
1025 :
1026 :
1027 828 : Bool gf_mo_is_same_url(GF_MediaObject *obj, MFURL *an_url, Bool *keep_fragment, u32 obj_hint_type)
1028 : {
1029 : Bool include_sub_url = GF_FALSE;
1030 : u32 i;
1031 : char szURL1[GF_MAX_PATH], szURL2[GF_MAX_PATH], *ext;
1032 :
1033 828 : if (!obj->URLs.count) {
1034 130 : if (!obj->odm) return GF_FALSE;
1035 126 : strcpy(szURL1, obj->odm->scene_ns->url);
1036 : } else {
1037 698 : strcpy(szURL1, obj->URLs.vals[0].url);
1038 : }
1039 :
1040 : /*don't analyse audio/video to locate segments or viewports*/
1041 824 : if ((obj->type==GF_MEDIA_OBJECT_AUDIO) || (obj->type==GF_MEDIA_OBJECT_VIDEO)) {
1042 182 : if (keep_fragment) *keep_fragment = GF_FALSE;
1043 : include_sub_url = GF_TRUE;
1044 642 : } else if ((obj->type==GF_MEDIA_OBJECT_SCENE) && keep_fragment && obj->odm) {
1045 : u32 j;
1046 : /*for remote ODs/dynamic ODs, check if one of the running services cannot be used*/
1047 482 : for (i=0; i<an_url->count; i++) {
1048 : GF_Scene *scene;
1049 : GF_SceneNamespace *sns;
1050 498 : char *frag = strrchr(an_url->vals[i].url, '#');
1051 498 : j=0;
1052 : /*this is the same object (may need some refinement)*/
1053 514 : if (!stricmp(szURL1, an_url->vals[i].url)) return GF_TRUE;
1054 :
1055 : /*fragment is a media segment, same URL*/
1056 482 : if (frag ) {
1057 : Bool same_res;
1058 38 : frag[0] = 0;
1059 38 : same_res = !strncmp(an_url->vals[i].url, szURL1, strlen(an_url->vals[i].url)) ? GF_TRUE : GF_FALSE;
1060 38 : frag[0] = '#';
1061 :
1062 : /*if we're talking about the same resource, check if the fragment can be matched*/
1063 38 : if (same_res) {
1064 : /*if the fragment is a node which can be found, this is the same resource*/
1065 38 : if (obj->odm->subscene && (gf_sg_find_node_by_name(obj->odm->subscene->graph, frag+1)!=NULL) )
1066 : return GF_TRUE;
1067 :
1068 : /*if the expected type is an existing segment (undefined media type), this is the same resource*/
1069 38 : if (!obj_hint_type && gf_odm_find_segment(obj->odm, frag+1))
1070 : return GF_TRUE;
1071 : }
1072 : }
1073 :
1074 482 : scene = gf_scene_get_root_scene(obj->odm->parentscene ? obj->odm->parentscene : obj->odm->subscene);
1075 482 : while ( (sns = (GF_SceneNamespace*) gf_list_enum(scene->namespaces, &j) ) ) {
1076 : /*sub-service of an existing service - don't touch any fragment*/
1077 : #ifdef FILTER_FIXME
1078 : if (gf_term_service_can_handle_url(sns, an_url->vals[i].url)) {
1079 : *keep_fragment = GF_TRUE;
1080 : return GF_FALSE;
1081 : }
1082 : #endif
1083 : }
1084 : }
1085 : }
1086 :
1087 : /*check on full URL without removing fragment IDs*/
1088 : if (include_sub_url) {
1089 176 : for (i=0; i<an_url->count; i++) {
1090 182 : if (an_url->vals[i].url && !stricmp(szURL1, an_url->vals[i].url)) return GF_TRUE;
1091 : }
1092 176 : if (obj->odm && (obj->odm->flags & GF_ODM_PASSTHROUGH) && an_url->count && an_url->vals[0].url && !strncmp(an_url->vals[0].url, "gpid://", 7))
1093 : return GF_TRUE;
1094 : /*not same resource, we will have to check fragment as URL might point to a sub-service or single stream of a mux*/
1095 118 : if (keep_fragment) *keep_fragment = GF_TRUE;
1096 :
1097 : return GF_FALSE;
1098 : }
1099 626 : ext = strrchr(szURL1, '#');
1100 626 : if (ext) ext[0] = 0;
1101 454 : for (i=0; i<an_url->count; i++) {
1102 626 : if (!an_url->vals[i].url) return GF_FALSE;
1103 : strcpy(szURL2, an_url->vals[i].url);
1104 626 : ext = strrchr(szURL2, '#');
1105 626 : if (ext) ext[0] = 0;
1106 626 : if (!stricmp(szURL1, szURL2)) return GF_TRUE;
1107 : }
1108 : return GF_FALSE;
1109 : }
1110 :
1111 : GF_EXPORT
1112 46 : Bool gf_mo_url_changed(GF_MediaObject *mo, MFURL *url)
1113 : {
1114 : u32 od_id;
1115 : Bool ret = GF_FALSE;
1116 46 : if (!mo) return (url ? GF_TRUE : GF_FALSE);
1117 46 : od_id = gf_mo_get_od_id(url);
1118 46 : if ( (mo->OD_ID == GF_MEDIA_EXTERNAL_ID) && (od_id == GF_MEDIA_EXTERNAL_ID)) {
1119 5 : ret = !gf_mo_is_same_url(mo, url, NULL, 0);
1120 : } else {
1121 41 : ret = (mo->OD_ID == od_id) ? GF_FALSE : GF_TRUE;
1122 : }
1123 : /*special case for 3GPP text: if not playing and user node changed, force removing it*/
1124 46 : if (ret && mo->odm && !mo->num_open && (mo->type == GF_MEDIA_OBJECT_TEXT)) {
1125 1 : mo->flags |= GF_MO_DISPLAY_REMOVE;
1126 : }
1127 : return ret;
1128 : }
1129 :
1130 : GF_EXPORT
1131 1 : void gf_mo_pause(GF_MediaObject *mo)
1132 : {
1133 : #ifndef GPAC_DISABLE_VRML
1134 1 : if (!mo || !mo->num_open || !mo->odm) return;
1135 1 : mediacontrol_pause(mo->odm);
1136 : #endif
1137 : }
1138 :
1139 : GF_EXPORT
1140 1 : void gf_mo_resume(GF_MediaObject *mo)
1141 : {
1142 : #ifndef GPAC_DISABLE_VRML
1143 1 : if (!mo || !mo->num_open || !mo->odm) return;
1144 1 : mediacontrol_resume(mo->odm, 0);
1145 : #endif
1146 : }
1147 :
1148 : GF_EXPORT
1149 431 : void gf_mo_set_speed(GF_MediaObject *mo, Fixed speed)
1150 : {
1151 : #ifndef GPAC_DISABLE_VRML
1152 : MediaControlStack *ctrl;
1153 : #endif
1154 :
1155 431 : if (!mo) return;
1156 243 : if (!mo->odm) {
1157 7 : mo->speed = speed;
1158 7 : return;
1159 : }
1160 : //override startup speed if asked to
1161 236 : if (mo->odm->set_speed) {
1162 : speed = mo->odm->set_speed;
1163 0 : mo->odm->set_speed = 0;
1164 : }
1165 : #ifndef GPAC_DISABLE_VRML
1166 : /*if media control forbids that*/
1167 236 : ctrl = gf_odm_get_mediacontrol(mo->odm);
1168 236 : if (ctrl) return;
1169 : #endif
1170 :
1171 232 : if (mo->odm->scene_ns && mo->odm->scene_ns->owner && (mo->odm->scene_ns->owner->flags & GF_ODM_INHERIT_TIMELINE))
1172 : return;
1173 :
1174 232 : gf_odm_set_speed(mo->odm, speed, GF_TRUE);
1175 : }
1176 :
1177 : GF_EXPORT
1178 36543 : Fixed gf_mo_get_current_speed(GF_MediaObject *mo)
1179 : {
1180 36543 : return (mo && mo->odm && mo->odm->ck) ? mo->odm->ck->speed : FIX_ONE;
1181 : }
1182 :
1183 : GF_EXPORT
1184 8214 : u32 gf_mo_get_min_frame_dur(GF_MediaObject *mo)
1185 : {
1186 8214 : return mo ? mo->frame_dur : 0;
1187 : }
1188 : GF_EXPORT
1189 5009 : u32 gf_mo_map_timestamp_to_sys_clock(GF_MediaObject *mo, u32 ts)
1190 : {
1191 5009 : return (mo && mo->odm)? mo->odm->ck->start_time + ts : 0;
1192 : }
1193 :
1194 28056 : Bool gf_mo_is_buffering(GF_MediaObject *mo)
1195 : {
1196 28056 : return (mo && mo->odm && mo->odm->ck->nb_buffering) ? GF_TRUE : GF_FALSE;
1197 : }
1198 :
1199 : GF_EXPORT
1200 11859 : Fixed gf_mo_get_speed(GF_MediaObject *mo, Fixed in_speed)
1201 : {
1202 : Fixed res = in_speed;
1203 11859 : if (!mo || !mo->odm) return in_speed;
1204 :
1205 : #ifndef GPAC_DISABLE_VRML
1206 : MediaControlStack *ctrl;
1207 :
1208 : /*get control*/
1209 7777 : ctrl = gf_odm_get_mediacontrol(mo->odm);
1210 7777 : if (ctrl) res = ctrl->control->mediaSpeed;
1211 :
1212 : #endif
1213 :
1214 : return res;
1215 : }
1216 :
1217 : GF_EXPORT
1218 6436 : Bool gf_mo_get_loop(GF_MediaObject *mo, Bool in_loop)
1219 : {
1220 : GF_Clock *ck;
1221 : #ifndef GPAC_DISABLE_VRML
1222 : MediaControlStack *ctrl;
1223 : #endif
1224 6436 : if (!mo || !mo->odm) return in_loop;
1225 :
1226 : /*get control*/
1227 : #ifndef GPAC_DISABLE_VRML
1228 4049 : ctrl = gf_odm_get_mediacontrol(mo->odm);
1229 4049 : if (ctrl) in_loop = ctrl->control->loop;
1230 : #endif
1231 :
1232 : /*otherwise looping is only accepted if not sharing parent scene clock*/
1233 4049 : ck = gf_odm_get_media_clock(mo->odm->parentscene->root_od);
1234 4049 : if (gf_odm_shares_clock(mo->odm, ck)) {
1235 : in_loop = GF_FALSE;
1236 : #ifndef GPAC_DISABLE_VRML
1237 : /*
1238 : if (ctrl && ctrl->stream->odm && ctrl->stream->odm->subscene)
1239 : gf_term_invalidate_compositor(mo->odm->term);
1240 : */
1241 : #endif
1242 : }
1243 : return in_loop;
1244 : }
1245 :
1246 : GF_EXPORT
1247 215 : Double gf_mo_get_duration(GF_MediaObject *mo)
1248 : {
1249 : Double dur;
1250 215 : dur = ((Double) (s64)mo->odm->duration)/1000.0;
1251 215 : return dur;
1252 : }
1253 :
1254 : GF_EXPORT
1255 39 : Bool gf_mo_should_deactivate(GF_MediaObject *mo)
1256 : {
1257 : Bool res = GF_FALSE;
1258 : #ifndef GPAC_DISABLE_VRML
1259 : MediaControlStack *ctrl;
1260 : #endif
1261 :
1262 39 : if (!mo || !mo->odm) return GF_TRUE;
1263 39 : if (!mo->odm->state) return GF_FALSE;
1264 : //if dynamic scene we can deactivate
1265 39 : if (mo->odm->parentscene && mo->odm->parentscene->is_dynamic_scene) {
1266 : return GF_TRUE;
1267 : }
1268 :
1269 : #ifndef GPAC_DISABLE_VRML
1270 : /*get media control and see if object owning control is running*/
1271 35 : ctrl = gf_odm_get_mediacontrol(mo->odm);
1272 35 : if (!ctrl) res = GF_TRUE;
1273 : /*if ctrl and ctrl not ruling this mediaObject, deny deactivation*/
1274 35 : else if (ctrl->stream->odm != mo->odm) res = GF_FALSE;
1275 : /*this is currently under discussion in MPEG. for now we deny deactivation as soon as a mediaControl is here*/
1276 35 : else if (ctrl->stream->odm->state) res = GF_FALSE;
1277 : /*otherwise allow*/
1278 : else
1279 : #endif
1280 : res = GF_TRUE;
1281 :
1282 : return res;
1283 : }
1284 :
1285 : GF_EXPORT
1286 107443 : Bool gf_mo_is_muted(GF_MediaObject *mo)
1287 : {
1288 : #ifndef GPAC_DISABLE_VRML
1289 107443 : return mo->odm->media_ctrl ? mo->odm->media_ctrl->control->mute : GF_FALSE;
1290 : #else
1291 : return GF_FALSE;
1292 : #endif
1293 : }
1294 :
1295 : GF_EXPORT
1296 22793 : Bool gf_mo_is_started(GF_MediaObject *mo)
1297 : {
1298 22793 : if (mo && mo->odm && gf_clock_is_started(mo->odm->ck)) return GF_TRUE;
1299 : return GF_FALSE;
1300 : }
1301 :
1302 : GF_EXPORT
1303 3859 : Bool gf_mo_is_done(GF_MediaObject *mo)
1304 : {
1305 : GF_Clock *ck;
1306 : u64 dur;
1307 3859 : if (!mo || !mo->odm) return GF_FALSE;
1308 :
1309 2103 : if (! mo->odm->has_seen_eos) return GF_FALSE;
1310 :
1311 5 : if ((mo->odm->type==GF_STREAM_AUDIO) || (mo->odm->type==GF_STREAM_VISUAL)) {
1312 : return GF_TRUE;
1313 : }
1314 :
1315 : /*check time - technically this should also apply to video streams since we could extend the duration
1316 : of the last frame - to further test*/
1317 5 : dur = (mo->odm->subscene && mo->odm->subscene->duration) ? mo->odm->subscene->duration : mo->odm->duration;
1318 : /*codec is done, check by duration*/
1319 5 : ck = gf_odm_get_media_clock(mo->odm);
1320 5 : if (gf_clock_time(ck) > dur)
1321 : return GF_TRUE;
1322 :
1323 1 : return GF_FALSE;
1324 : }
1325 :
1326 : /*resyncs clock - only audio objects are allowed to use this*/
1327 : GF_EXPORT
1328 28027 : void gf_mo_adjust_clock(GF_MediaObject *mo, s32 ms_drift)
1329 : {
1330 28027 : if (!mo || !mo->odm) return;
1331 28027 : if (mo->odm->type != GF_STREAM_AUDIO) return;
1332 28027 : gf_clock_set_audio_delay(mo->odm->ck, ms_drift);
1333 : }
1334 :
1335 : GF_EXPORT
1336 5 : void gf_mo_set_flag(GF_MediaObject *mo, GF_MOUserFlags flag, Bool set_on)
1337 : {
1338 5 : if (mo) {
1339 5 : if (set_on)
1340 5 : mo->flags |= flag;
1341 : else
1342 0 : mo->flags &= ~flag;
1343 : }
1344 5 : }
1345 :
1346 : GF_EXPORT
1347 1 : u32 gf_mo_has_audio(GF_MediaObject *mo)
1348 : {
1349 : #ifdef FILTER_FIXME
1350 : char *sub_url;
1351 : #endif
1352 : u32 i;
1353 : GF_SceneNamespace *ns;
1354 : GF_Scene *scene;
1355 1 : if (!mo || !mo->odm) return 0;
1356 1 : if (mo->type != GF_MEDIA_OBJECT_VIDEO) return 0;
1357 1 : if (!mo->odm->scene_ns) return 2;
1358 :
1359 : ns = mo->odm->scene_ns;
1360 1 : scene = mo->odm->parentscene;
1361 : #ifdef FILTER_FIXME
1362 : sub_url = strchr(ns->url, '#');
1363 : #endif
1364 2 : for (i=0; i<gf_list_count(scene->resources); i++) {
1365 1 : GF_ObjectManager *odm = (GF_ObjectManager *)gf_list_get(scene->resources, i);
1366 1 : if (odm->scene_ns != ns) continue;
1367 : //object already associated
1368 1 : if (odm->mo) continue;
1369 :
1370 : #ifdef FILTER_FIXME
1371 : if (sub_url) {
1372 : char *ext = mo->URLs.count ? mo->URLs.vals[0].url : NULL;
1373 : if (ext) ext = strchr(ext, '#');
1374 : if (!ext || strcmp(sub_url, ext)) continue;
1375 : }
1376 : #endif
1377 : /*we have one audio object not bound with the scene from the same service, let's use it*/
1378 0 : if (odm->type == GF_STREAM_AUDIO) return 1;
1379 : }
1380 : return 0;
1381 : }
1382 :
1383 : GF_EXPORT
1384 13 : GF_SceneGraph *gf_mo_get_scenegraph(GF_MediaObject *mo)
1385 : {
1386 13 : if (!mo || !mo->odm || !mo->odm->subscene) return NULL;
1387 13 : return mo->odm->subscene->graph;
1388 : }
1389 :
1390 :
1391 : GF_EXPORT
1392 513 : GF_DOMEventTarget *gf_mo_event_target_add_node(GF_MediaObject *mo, GF_Node *n)
1393 : {
1394 : #ifndef GPAC_DISABLE_SVG
1395 : GF_DOMEventTarget *target = NULL;
1396 513 : if (!mo ||!n) return NULL;
1397 513 : target = gf_dom_event_get_target_from_node(n);
1398 513 : gf_list_add(mo->evt_targets, target);
1399 513 : return target;
1400 : #else
1401 : return NULL;
1402 : #endif
1403 : }
1404 :
1405 472 : GF_Err gf_mo_event_target_remove_by_index(GF_MediaObject *mo, u32 i)
1406 : {
1407 472 : if (!mo) return GF_BAD_PARAM;
1408 472 : gf_list_rem(mo->evt_targets, i);
1409 472 : return GF_OK;
1410 : }
1411 :
1412 3598 : GF_Node *gf_mo_event_target_enum_node(GF_MediaObject *mo, u32 *i)
1413 : {
1414 : GF_DOMEventTarget *target;
1415 3598 : if (!mo || !i) return NULL;
1416 3598 : target = (GF_DOMEventTarget *)gf_list_enum(mo->evt_targets, i);
1417 3598 : if (!target) return NULL;
1418 : //if (target->ptr_type != GF_DOM_EVENT_TARGET_NODE) return NULL;
1419 2002 : return (GF_Node *)target->ptr;
1420 : }
1421 :
1422 1960 : s32 gf_mo_event_target_find_by_node(GF_MediaObject *mo, GF_Node *node)
1423 : {
1424 : u32 i, count;
1425 1960 : count = gf_list_count(mo->evt_targets);
1426 2026 : for (i = 0; i < count; i++) {
1427 1482 : GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1428 1482 : if (target->ptr == node) {
1429 1416 : return i;
1430 : }
1431 : }
1432 : return -1;
1433 : }
1434 :
1435 : GF_EXPORT
1436 424 : GF_Err gf_mo_event_target_remove_by_node(GF_MediaObject *mo, GF_Node *node)
1437 : {
1438 : u32 i, count;
1439 424 : count = gf_list_count(mo->evt_targets);
1440 469 : for (i = 0; i < count; i++) {
1441 45 : GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1442 45 : if (target->ptr == node) {
1443 39 : gf_list_del_item(mo->evt_targets, target);
1444 39 : i--;
1445 39 : count--;
1446 : //return GF_OK;
1447 : }
1448 : }
1449 424 : return GF_BAD_PARAM;
1450 : }
1451 :
1452 : GF_EXPORT
1453 1705 : GF_Node *gf_event_target_get_node(GF_DOMEventTarget *target)
1454 : {
1455 1705 : if (target && (target->ptr_type == GF_DOM_EVENT_TARGET_NODE)) {
1456 1690 : return (GF_Node *)target->ptr;
1457 : }
1458 : return NULL;
1459 : }
1460 :
1461 : GF_EXPORT
1462 1705 : GF_DOMEventTarget *gf_mo_event_target_get(GF_MediaObject *mo, u32 i)
1463 : {
1464 1705 : GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i);
1465 1705 : return target;
1466 : }
1467 :
1468 486 : void gf_mo_event_target_reset(GF_MediaObject *mo)
1469 : {
1470 486 : if (mo->evt_targets) gf_list_reset(mo->evt_targets);
1471 486 : }
1472 :
1473 48566 : u32 gf_mo_event_target_count(GF_MediaObject *mo)
1474 : {
1475 48566 : if (!mo) return 0;
1476 48566 : return gf_list_count(mo->evt_targets);
1477 : }
1478 :
1479 496 : void gf_mo_del(GF_MediaObject *mo)
1480 : {
1481 : assert(gf_list_count(mo->evt_targets) == 0);
1482 496 : gf_list_del(mo->evt_targets);
1483 496 : if (mo->pck) gf_filter_pck_unref(mo->pck);
1484 496 : gf_sg_mfurl_del(mo->URLs);
1485 496 : gf_free(mo);
1486 496 : }
1487 :
1488 :
1489 202 : Bool gf_mo_get_srd_info(GF_MediaObject *mo, GF_MediaObjectVRInfo *vr_info)
1490 : {
1491 : GF_Scene *scene;
1492 202 : if (!vr_info || !mo->odm) return GF_FALSE;
1493 :
1494 202 : scene = mo->odm->subscene ? mo->odm->subscene : mo->odm->parentscene;
1495 : memset(vr_info, 0, sizeof(GF_MediaObjectVRInfo));
1496 :
1497 202 : vr_info->srd_x = mo->srd_x;
1498 202 : vr_info->srd_y = mo->srd_y;
1499 202 : vr_info->srd_w = mo->srd_w;
1500 202 : vr_info->srd_h = mo->srd_h;
1501 202 : vr_info->srd_min_x = scene->srd_min_x;
1502 202 : vr_info->srd_min_y = scene->srd_min_y;
1503 202 : vr_info->srd_max_x = scene->srd_max_x;
1504 202 : vr_info->srd_max_y = scene->srd_max_y;
1505 202 : vr_info->is_tiled_srd = scene->is_tiled_srd;
1506 202 : vr_info->has_full_coverage = (scene->srd_type==2) ? GF_TRUE : GF_FALSE;
1507 :
1508 202 : gf_sg_get_scene_size_info(scene->graph, &vr_info->scene_width, &vr_info->scene_height);
1509 :
1510 202 : if (mo->srd_w && mo->srd_h) return GF_TRUE;
1511 202 : if (mo->srd_full_w && mo->srd_full_h) return GF_TRUE;
1512 152 : return GF_FALSE;
1513 : }
1514 :
1515 : /*sets quality degradation hint for this media object - quality_degradation is between 0 (no degradation) and 100 (maximum degradation, lowest quality)*/
1516 0 : void gf_mo_hint_quality_degradation(GF_MediaObject *mo, u32 quality_degradation)
1517 : {
1518 0 : if (!mo || !mo->odm || !mo->odm->pid) {
1519 : return;
1520 : }
1521 0 : if (mo->quality_degradation_hint != quality_degradation) {
1522 : GF_FilterEvent evt;
1523 0 : GF_FEVT_INIT(evt, GF_FEVT_QUALITY_SWITCH, mo->odm->pid);
1524 0 : evt.quality_switch.quality_degradation = quality_degradation;
1525 0 : gf_filter_pid_send_event(mo->odm->pid, &evt);
1526 :
1527 0 : mo->quality_degradation_hint = quality_degradation;
1528 : }
1529 : }
1530 :
1531 50 : void gf_mo_hint_visible_rect(GF_MediaObject *mo, u32 min_x, u32 max_x, u32 min_y, u32 max_y)
1532 : {
1533 50 : if (!mo || !mo->odm || !mo->odm->pid) {
1534 : return;
1535 : }
1536 :
1537 50 : if ((mo->view_min_x!=min_x) || (mo->view_max_x!=max_x) || (mo->view_min_y!=min_y) || (mo->view_max_y!=max_y)) {
1538 : GF_FilterEvent evt;
1539 4 : GF_FEVT_INIT(evt, GF_FEVT_VISIBILITY_HINT, mo->odm->pid);
1540 4 : mo->view_min_x = min_x;
1541 4 : mo->view_max_x = max_x;
1542 4 : mo->view_min_y = min_y;
1543 4 : mo->view_max_y = max_y;
1544 :
1545 4 : evt.visibility_hint.min_x = min_x;
1546 4 : evt.visibility_hint.max_x = max_x;
1547 4 : evt.visibility_hint.min_y = min_y;
1548 4 : evt.visibility_hint.max_y = max_y;
1549 :
1550 4 : gf_filter_pid_send_event(mo->odm->pid, &evt);
1551 : }
1552 : }
1553 :
1554 0 : void gf_mo_hint_gaze(GF_MediaObject *mo, u32 gaze_x, u32 gaze_y)
1555 : {
1556 0 : if (!mo || !mo->odm || !mo->odm->pid) {
1557 : return;
1558 : }
1559 :
1560 0 : if ((mo->view_min_x!=gaze_x) || (mo->view_min_y!=gaze_y) ) {
1561 : GF_FilterEvent evt;
1562 0 : GF_FEVT_INIT(evt, GF_FEVT_VISIBILITY_HINT, mo->odm->pid);
1563 0 : mo->view_min_x = gaze_x;
1564 0 : mo->view_min_y = gaze_y;
1565 :
1566 0 : evt.visibility_hint.min_x = gaze_x;
1567 0 : evt.visibility_hint.min_y = gaze_y;
1568 0 : evt.visibility_hint.is_gaze = GF_TRUE;
1569 :
1570 0 : gf_filter_pid_send_event(mo->odm->pid, &evt);
1571 : }
1572 : }
1573 :
1574 :
1575 :