Remove unused functions in VCMTiming.
Remove VCMTiming::EnoughTimeToDecode and VCMTiming::ResetDecodeTime.
Make VCMTiming::StopDecodeTimer void (it always returned zero).
Update the ReceiverTiming.WrapAround test to insert a timestamp that wraps.

Bug: none
Change-Id: I85a8bfd6be18371810b638284b4af73a46894be7
Reviewed-on: https://webrtc-review.googlesource.com/36060
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21660}
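For context on the WrapAround change: the timestamps VCMTiming works with are unsigned 32-bit RTP timestamps, so a long-running stream eventually passes 0xFFFFFFFF and wraps back to a small value. The standalone sketch below is only an illustration of that behaviour, not the actual test code; the 90 kHz clock rate and the 25 fps frame step are assumptions chosen for the example. It shows why a wrapping timestamp is a useful test input: the raw value decreases across the wrap, but the unsigned forward difference is still correct.

#include <cstdint>
#include <iostream>

int main() {
  // One frame step at an assumed 90 kHz RTP clock and 25 fps: 3600 ticks.
  const uint32_t kTimestampStep = 90000 / 25;
  // Start just below the 32-bit ceiling so the next step wraps.
  const uint32_t previous = 0xFFFFFFFFu - kTimestampStep / 2;
  const uint32_t current = previous + kTimestampStep;  // Wraps modulo 2^32.

  // The raw value goes down across the wrap...
  std::cout << "previous=" << previous << " current=" << current << "\n";

  // ...but the unsigned difference still recovers the true forward step
  // (3600), which is what a timestamp-to-render-time mapping has to handle.
  const uint32_t forward_diff = current - previous;
  std::cout << "forward_diff=" << forward_diff << "\n";
  return 0;
}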
@@ -32,7 +32,6 @@ class VCMTiming {
 
   // Resets the timing to the initial state.
   void Reset();
-  void ResetDecodeTime();
 
   // Set the amount of time needed to render an image. Defaults to 10 ms.
   void set_render_delay(int render_delay_ms);
@@ -41,16 +40,12 @@ class VCMTiming {
   // get the desired jitter buffer level.
   void SetJitterDelay(int required_delay_ms);
 
-  // Set the minimum playout delay from capture to render in ms.
+  // Set/get the minimum playout delay from capture to render in ms.
   void set_min_playout_delay(int min_playout_delay_ms);
-
-  // Returns the minimum playout delay from capture to render in ms.
   int min_playout_delay();
 
-  // Set the maximum playout delay from capture to render in ms.
+  // Set/get the maximum playout delay from capture to render in ms.
   void set_max_playout_delay(int max_playout_delay_ms);
-
-  // Returns the maximum playout delay from capture to render in ms.
   int max_playout_delay();
 
   // Increases or decreases the current delay to get closer to the target delay.
@@ -67,17 +62,18 @@ class VCMTiming {
 
   // Stops the decoder timer, should be called when the decoder returns a frame
   // or when the decoded frame callback is called.
-  int32_t StopDecodeTimer(uint32_t time_stamp,
-                          int32_t decode_time_ms,
-                          int64_t now_ms,
-                          int64_t render_time_ms);
+  void StopDecodeTimer(uint32_t time_stamp,
+                       int32_t decode_time_ms,
+                       int64_t now_ms,
+                       int64_t render_time_ms);
 
   // Used to report that a frame is passed to decoding. Updates the timestamp
   // filter which is used to map between timestamps and receiver system time.
   void IncomingTimestamp(uint32_t time_stamp, int64_t last_packet_time_ms);
 
   // Returns the receiver system time when the frame with timestamp
-  // frame_timestamp should be rendered, assuming that the system time currently
-  // is now_ms.
+  // |frame_timestamp| should be rendered, assuming that the system time
+  // currently is |now_ms|.
   virtual int64_t RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms) const;
 
   // Returns the maximum time in ms that we can wait for a frame to become
@@ -88,10 +84,6 @@ class VCMTiming {
   // render delay.
   int TargetVideoDelay() const;
 
-  // Calculates whether or not there is enough time to decode a frame given a
-  // certain amount of processing time.
-  bool EnoughTimeToDecode(uint32_t available_processing_time_ms) const;
-
   // Return current timing information. Returns true if the first frame has been
   // decoded, false otherwise.
   virtual bool GetTimings(int* decode_ms,
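A hedged caller-side sketch of the StopDecodeTimer signature change: the old return value carried no information (it was always zero), so call sites only need to stop assigning or checking it. Only methods visible in the header diff above are used; the include paths, the Clock parameter and the constant values are assumptions for illustration, not taken from this commit.

#include <cstdint>

#include "modules/video_coding/timing.h"    // Assumed path for VCMTiming.
#include "system_wrappers/include/clock.h"  // Assumed path for webrtc::Clock.

// Example of reporting a finished decode to VCMTiming after this change.
void OnFrameDecodedExample(webrtc::Clock* clock, webrtc::VCMTiming* timing) {
  const uint32_t kTimestamp = 90000;  // Arbitrary RTP timestamp (assumption).
  const int32_t kDecodeTimeMs = 7;    // Measured decode duration (assumption).
  const int64_t now_ms = clock->TimeInMilliseconds();
  const int64_t render_time_ms = timing->RenderTimeMs(kTimestamp, now_ms);

  // StopDecodeTimer used to return an int32_t that was always zero; after
  // this commit it returns void, so there is no result left to check.
  timing->StopDecodeTimer(kTimestamp, kDecodeTimeMs, now_ms, render_time_ms);
}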