 import com.otaliastudios.transcoder.internal.ValidatorException;
 import com.otaliastudios.transcoder.sink.DataSink;
 import com.otaliastudios.transcoder.sink.InvalidOutputFormatException;
-import com.otaliastudios.transcoder.sink.MediaMuxerDataSink;
+import com.otaliastudios.transcoder.sink.DefaultDataSink;
 import com.otaliastudios.transcoder.source.DataSource;
 import com.otaliastudios.transcoder.strategy.TrackStrategy;
 import com.otaliastudios.transcoder.time.TimeInterpolator;
 import androidx.annotation.Nullable;

 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -205,7 +204,6 @@ private void closeCurrentStep(@NonNull TrackType type) {
     private TrackTranscoder getCurrentTrackTranscoder(@NonNull TrackType type, @NonNull TranscoderOptions options) {
         int current = mCurrentStep.require(type);
         int last = mTranscoders.require(type).size() - 1;
-        int max = mDataSources.require(type).size();
         if (last == current) {
             // We have already created a transcoder for this step.
             // But this step might be completed and we might need to create a new one.
@@ -251,26 +249,49 @@ public long interpolate(@NonNull TrackType type, long time) {
         };
     }

-    private double getTrackProgress(@NonNull TrackType type) {
-        if (!mStatuses.require(type).isTranscoding()) return 0.0D;
+    private long getTrackDurationUs(@NonNull TrackType type) {
+        if (!mStatuses.require(type).isTranscoding()) return 0L;
         int current = mCurrentStep.require(type);
         long totalDurationUs = 0;
-        long completedDurationUs = 0;
         for (int i = 0; i < mDataSources.require(type).size(); i++) {
             DataSource source = mDataSources.require(type).get(i);
-            if (i < current) {
+            if (i < current) { // getReadUs() is a better approximation for sure.
                 totalDurationUs += source.getReadUs();
-                completedDurationUs += source.getReadUs();
-            } else if (i == current) {
-                totalDurationUs += source.getDurationUs();
-                completedDurationUs += source.getReadUs();
             } else {
                 totalDurationUs += source.getDurationUs();
-                completedDurationUs += 0;
             }
         }
-        if (totalDurationUs == 0) totalDurationUs = 1;
-        return (double) completedDurationUs / (double) totalDurationUs;
+        return totalDurationUs;
+    }
+
+    private long getTotalDurationUs() {
+        boolean hasVideo = hasVideoSources() && mStatuses.requireVideo().isTranscoding();
+        boolean hasAudio = hasAudioSources() && mStatuses.requireAudio().isTranscoding();
+        long video = hasVideo ? getTrackDurationUs(TrackType.VIDEO) : Long.MAX_VALUE;
+        long audio = hasAudio ? getTrackDurationUs(TrackType.AUDIO) : Long.MAX_VALUE;
+        return Math.min(video, audio);
+    }
+
+    private long getTrackReadUs(@NonNull TrackType type) {
+        if (!mStatuses.require(type).isTranscoding()) return 0L;
+        int current = mCurrentStep.require(type);
+        long completedDurationUs = 0;
+        for (int i = 0; i < mDataSources.require(type).size(); i++) {
+            DataSource source = mDataSources.require(type).get(i);
+            if (i <= current) {
+                completedDurationUs += source.getReadUs();
+            }
+        }
+        return completedDurationUs;
+    }
+
+    private double getTrackProgress(@NonNull TrackType type) {
+        if (!mStatuses.require(type).isTranscoding()) return 0.0D;
+        long readUs = getTrackReadUs(type);
+        long totalUs = getTotalDurationUs();
+        LOG.v("getTrackProgress - readUs:" + readUs + ", totalUs:" + totalUs);
+        if (totalUs == 0) totalUs = 1; // Avoid NaN
+        return (double) readUs / (double) totalUs;
     }

     /**
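For context on the progress computation above: per-track progress is now the microseconds already read from that track's sources divided by an overall duration that is clamped to the shorter of the two tracks via getTotalDurationUs(). A minimal standalone sketch with hypothetical durations follows; it is an illustration of the math only, not code from the commit or from the engine's own classes.

// Standalone sketch of the progress math introduced above; all values are hypothetical.
public class ProgressMathSketch {
    public static void main(String[] args) {
        long videoDurationUs = 60_000_000L; // 60 s of video sources
        long audioDurationUs = 20_000_000L; // 20 s of audio sources
        // Clamp the overall duration to the shorter track, as getTotalDurationUs() does.
        long totalUs = Math.min(videoDurationUs, audioDurationUs);
        if (totalUs == 0) totalUs = 1; // avoid NaN, as in getTrackProgress()

        long videoReadUs = 10_000_000L; // microseconds already read from video sources
        long audioReadUs = 10_000_000L; // microseconds already read from audio sources

        double videoProgress = (double) videoReadUs / (double) totalUs; // 0.5
        double audioProgress = (double) audioReadUs / (double) totalUs; // 0.5
        int activeTracks = 2;
        System.out.println("overall = " + (videoProgress + audioProgress) / activeTracks);
    }
}

Because both tracks are measured against the same clamped total, the longer track would keep gaining progress past 1.0 if it kept reading, which is why the loop further down forces an end of stream once a track reads past that duration.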
@@ -281,7 +302,7 @@ private double getTrackProgress(@NonNull TrackType type) {
      * @throws InterruptedException when the transcoding is cancelled
      */
     public void transcode(@NonNull TranscoderOptions options) throws InterruptedException {
-        mDataSink = new MediaMuxerDataSink(options.getOutputPath());
+        mDataSink = new DefaultDataSink(options.getOutputPath());
         mDataSources.setVideo(options.getVideoDataSources());
         mDataSources.setAudio(options.getAudioDataSources());

@@ -295,16 +316,7 @@ public void transcode(@NonNull TranscoderOptions options) throws InterruptedException {
             }
         }

-        // Compute total duration: it is the minimum between the two.
-        long audioDurationUs = hasAudioSources() ? 0 : Long.MAX_VALUE;
-        long videoDurationUs = hasVideoSources() ? 0 : Long.MAX_VALUE;
-        for (DataSource source : options.getVideoDataSources()) videoDurationUs += source.getDurationUs();
-        for (DataSource source : options.getAudioDataSources()) audioDurationUs += source.getDurationUs();
-        long totalDurationUs = Math.min(audioDurationUs, videoDurationUs);
-        LOG.v("Duration (us): " + totalDurationUs);
-
-        // TODO if audio and video have different lengths, we should clip the longer one!
-        // TODO ClipDataSource or something like that, to choose
+        // TODO ClipDataSource or something like that

         // Compute the TrackStatus.
         int activeTracks = 0;
@@ -314,6 +326,7 @@ public void transcode(@NonNull TranscoderOptions options) throws InterruptedException {
         TrackStatus audioStatus = mStatuses.requireAudio();
         if (videoStatus.isTranscoding()) activeTracks++;
         if (audioStatus.isTranscoding()) activeTracks++;
+        LOG.v("Duration (us): " + getTotalDurationUs());

         // Pass to Validator.
         //noinspection UnusedAssignment
@@ -331,22 +344,35 @@ public void transcode(@NonNull TranscoderOptions options) throws InterruptedException {
         long loopCount = 0;
         boolean stepped = false;
         boolean audioCompleted = false, videoCompleted = false;
+        boolean forceAudioEos = false, forceVideoEos = false;
+        double audioProgress = 0, videoProgress = 0;
         while (!(audioCompleted && videoCompleted)) {
             if (Thread.interrupted()) {
                 throw new InterruptedException();
             }
             stepped = false;
+
+            // First, check if we have to force an input end of stream for some track.
+            // This can happen, for example, if user adds 1 minute (video only) with 20 seconds
+            // of audio. The video track must be stopped once the audio stops.
+            long totalUs = getTotalDurationUs() + 100 /* tolerance */;
+            forceAudioEos = getTrackReadUs(TrackType.AUDIO) > totalUs;
+            forceVideoEos = getTrackReadUs(TrackType.VIDEO) > totalUs;
+
+            // Now step for transcoders that are not completed.
             audioCompleted = isCompleted(TrackType.AUDIO);
             videoCompleted = isCompleted(TrackType.VIDEO);
             if (!audioCompleted) {
-                stepped |= getCurrentTrackTranscoder(TrackType.AUDIO, options).transcode();
+                stepped |= getCurrentTrackTranscoder(TrackType.AUDIO, options).transcode(forceAudioEos);
             }
             if (!videoCompleted) {
-                stepped |= getCurrentTrackTranscoder(TrackType.VIDEO, options).transcode();
+                stepped |= getCurrentTrackTranscoder(TrackType.VIDEO, options).transcode(forceVideoEos);
             }
             if (++loopCount % PROGRESS_INTERVAL_STEPS == 0) {
-                setProgress((getTrackProgress(TrackType.VIDEO)
-                        + getTrackProgress(TrackType.AUDIO)) / activeTracks);
+                audioProgress = getTrackProgress(TrackType.AUDIO);
+                videoProgress = getTrackProgress(TrackType.VIDEO);
+                LOG.v("progress - video:" + videoProgress + " audio:" + audioProgress);
+                setProgress((videoProgress + audioProgress) / activeTracks);
             }
             if (!stepped) {
                 Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
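For context on the forced end-of-stream logic above: once a track has read more microseconds than the clamped total duration plus the 100 µs tolerance, its transcoder is stepped with transcode(true) so it stops pulling input. A tiny sketch of that decision with hypothetical read positions follows; it is an illustration only, not code from the commit.

// Sketch of the force-EOS decision; read positions are hypothetical.
public class ForceEosSketch {
    public static void main(String[] args) {
        long clampedDurationUs = 20_000_000L;   // min(video, audio) duration, e.g. 20 s of audio
        long totalUs = clampedDurationUs + 100; // plus tolerance, as in the loop above

        long videoReadUs = 20_000_200L;         // video has read past the audio duration
        long audioReadUs = 19_900_000L;         // audio is still within its own duration

        boolean forceVideoEos = videoReadUs > totalUs; // true  -> transcode(true) for video
        boolean forceAudioEos = audioReadUs > totalUs; // false -> audio keeps stepping normally

        System.out.println("forceVideoEos=" + forceVideoEos + ", forceAudioEos=" + forceAudioEos);
    }
}

In the one-minute-video / twenty-seconds-of-audio case from the comment in the loop, this is what stops the video track shortly after the audio track runs out.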