Reputation: 5830
As I said, the more videos I record and append, the further the audio drifts out of sync. How can I fix this? I use the following code to append the videos:
public class ConcatenateVideos extends ExecutorAsyncTask<String, Void, Boolean> {
private ArrayList<String> video_urls = null;
private final String TAG = ConcatenateVideos.class.getSimpleName();
public void setUris(ArrayList<String> videos) {
LogService.log(TAG, "set uris");
if (videos != null) {
video_urls = videos;
this.execute();
}
}
@Override
protected Boolean doInBackground(String... params) {
boolean success = false;
FileInputStream[] videos = new FileInputStream[video_urls.size()];
try {
for (int i = 0; i < video_urls.size(); i++) {
videos[i] = new FileInputStream(video_urls.get(i));
}
success = concatenateVideos(videos);
} catch (Exception e) {
success = false;
LogService.err(TAG, e.getMessage(), e);
}
return success;
}
private boolean concatenateVideos(InputStream[] video_streams) {
boolean success = false;
Movie[] inMovies = new Movie[video_streams.length];
FileChannel fc = null;
Movie result = new Movie();
IsoFile out = null;
try {
for (int i = 0; i < inMovies.length; i++) {
if (video_streams[i] != null) {
inMovies[i] = MovieCreator.build(Channels.newChannel(video_streams[i]));
}
}
List<Track> videoTracks = new LinkedList<Track>();
List<Track> audioTracks = new LinkedList<Track>();
for (Movie m : inMovies) {
for (Track t : m.getTracks()) {
if (t.getHandler().equals("soun")) {
audioTracks.add(t);
}
if (t.getHandler().equals("vide")) {
videoTracks.add(t);
}
}
}
if (audioTracks.size() > 0) {
result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));
}
if (videoTracks.size() > 0) {
result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));
}
out = new DefaultMp4Builder().build(result);
// the result is written back over the first input file (see the finally block)
fc = new RandomAccessFile(video_urls.get(0), "rw").getChannel();
// the remaining source files are no longer needed, delete them
for (int i = 1; i < video_urls.size(); i++) {
File f = new File(video_urls.get(i));
LogService.log(TAG, "delete file : " + f.delete());
}
success = true;
} catch (Exception e) {
LogService.err(TAG, e.getMessage(), e);
success = false;
} finally {
try {
LogService.log(TAG, "==========finally");
if (fc != null) {
fc.position(0);
out.getBox(fc);
fc.close();
}
} catch (Exception e) {
LogService.err(TAG, e.getMessage(), e);
}
}
return success;
}
}
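For reference, the same result can also be written to a fresh file instead of overwriting the first input in place. A minimal sketch, assuming the newer mp4parser 1.x API where DefaultMp4Builder.build() returns a Container (the output path is illustrative):
// Sketch: write the concatenated movie to a separate output file
Container out = new DefaultMp4Builder().build(result);
FileChannel fc = new FileOutputStream("/path/to/output.mp4").getChannel(); // illustrative path
out.writeContainer(fc);
fc.close();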
And this is the Service I use to run the ConcatenateVideos task:
private final String TAG = ConcatenateVideosService.class.getSimpleName();
final Messenger myMessenger = new Messenger(new IncomingHandler());
class IncomingHandler extends Handler {
private Messenger client = null;
@Override
public void handleMessage(Message msg) {
// init messenger
if (client == null) {
client = msg.replyTo;
}
// get the message
Bundle data = msg.getData();
byte command = data.getByte("message");
switch (command) {
case Constants.INIT_CMD_SERVICE:
LogService.log(TAG, "INIT_CMD_SERVICE:");
break;
case Constants.CONCATE_CMD_SERVICE:
LogService.log(TAG, "CONCATE_CMD_SERVICE:");
ArrayList<String> videos = data.getStringArrayList(Constants.SERVICE_VIDEO_URLS);
ConcatenateVideos concatenateVideos = new ConcatenateVideos() {
@Override
protected void onPostExecute(Boolean result) {
LogService.log(TAG, "onPostExecute() , result : " + result);
super.onPostExecute(result);
// setup the answer
Message answer = Message.obtain();
Bundle bundle = new Bundle();
bundle.putBoolean("result", result);
answer.setData(bundle);
// send the answer
try {
client.send(answer);
} catch (RemoteException e) {
LogService.err(TAG, e.getMessage(), e);
}
}
};
concatenateVideos.setUris(videos);
break;
}
}
}
@Override
public boolean onUnbind(Intent intent) {
stopSelf();
return super.onUnbind(intent);
}
@Override
public IBinder onBind(Intent intent) {
return myMessenger.getBinder();
}
@Override
public void onDestroy() {
super.onDestroy();
}
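For completeness, the client side is expected to send the command roughly like this. This is a sketch: serviceMessenger (obtained in onServiceConnected()), clientMessenger, and videoPaths are assumptions, not part of the code above.
// Sketch: sending the concatenate command to the bound service
Message msg = Message.obtain();
Bundle data = new Bundle();
data.putByte("message", Constants.CONCATE_CMD_SERVICE);
data.putStringArrayList(Constants.SERVICE_VIDEO_URLS, videoPaths);
msg.setData(data);
msg.replyTo = clientMessenger; // lets IncomingHandler send the result back
try {
serviceMessenger.send(msg);
} catch (RemoteException e) {
LogService.err(TAG, e.getMessage(), e);
}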
My videos are recorded at the following quality: video bitrate 800000, audio bitrate 64000, audio sampling rate 44100, MPEG_4 container with H.264 video and AAC audio, at 30 fps.
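For reference, those settings correspond to a MediaRecorder configuration along these lines (a sketch; camera setup, preview size, output file, and prepare()/start() are omitted):
MediaRecorder recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
recorder.setVideoEncodingBitRate(800000);
recorder.setAudioEncodingBitRate(64000);
recorder.setAudioSamplingRate(44100);
recorder.setVideoFrameRate(30);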
Now I made a test: if I record 4 videos, every video has a video timescale of 90000 and an audio timescale of 44100. But after appending the videos, the audio timescale is still 44100, while the video timescale is 900. Why does the video timescale change and not the audio one?
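(For anyone who wants to reproduce the measurement: the per-track timescales can be read with mp4parser roughly like this; the file path is illustrative and build(String) is the 1.x overload.)
Movie movie = MovieCreator.build("/path/to/video.mp4");
for (Track t : movie.getTracks()) {
LogService.log(TAG, t.getHandler() + " timescale: " + t.getTrackMetaData().getTimescale());
}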
Upvotes: 1
Views: 2087
Reputation: 1023
I know this question is old, but I faced the same problem without finding a clear solution, so, taking code from here and there, I put together a couple of functions that solve it.
@Throws(Exception::class)
fun appendVideos(videoPathList: List<String>, targetFilePath: String) {
val movies = videoPathList.map { file -> MovieCreator.build(file) }
val finalMovie = Movie()
val videoTracksTotal = mutableListOf<Track>()
val audioTracksTotal = mutableListOf<Track>()
var audioDuration = 0.0
var videoDuration = 0.0
movies.forEach { movie ->
val videoTracks = mutableListOf<Track>()
val audioTracks = mutableListOf<Track>()
movie.tracks.forEach { track ->
val trackDuration = track.sampleDurations.toList()
.map { t -> t.toDouble() / track.trackMetaData.timescale }.sum()
if (track.handler == "vide") {
videoDuration += trackDuration
videoTracks.add(track)
} else if (track.handler == "soun") {
audioDuration += trackDuration
audioTracks.add(track)
}
}
// Adjusting Durations
adjustDurations(videoTracks, audioTracks, videoDuration, audioDuration).let {
audioDuration = it.audioDuration
videoDuration = it.videoDuration
}
videoTracksTotal.addAll(videoTracks)
audioTracksTotal.addAll(audioTracks)
}
if (videoTracksTotal.isNotEmpty() && audioTracksTotal.isNotEmpty()) {
finalMovie.addTrack(AppendTrack(*videoTracksTotal.toTypedArray()))
finalMovie.addTrack(AppendTrack(*audioTracksTotal.toTypedArray()))
}
val container = DefaultMp4Builder().build(finalMovie)
// use {} closes the stream even if writeContainer throws
FileOutputStream(targetFilePath).use { fos ->
container.writeContainer(fos.channel)
}
}
class Durations(val audioDuration: Double, val videoDuration: Double)
private fun adjustDurations(
videoTracks: MutableList<Track>,
audioTracks: MutableList<Track>,
videoDuration: Double,
audioDuration: Double
): Durations {
var diff = audioDuration - videoDuration
val tracks: MutableList<Track>
var durationOperator: Double
val isAudioProblem: Boolean
when {
// audio and video match, no operations to perform
diff == 0.0 -> {
return Durations(audioDuration, videoDuration)
}
// audio tracks are longer than video
diff > 0 -> {
tracks = audioTracks
durationOperator = audioDuration
isAudioProblem = true
}
// video tracks are longer than audio
else -> {
tracks = videoTracks
durationOperator = videoDuration
diff *= -1.0
isAudioProblem = false
}
}
// Getting the last track in order to operate with it
var track: Track = tracks.last()
var counter: Long = 0
// Walk the sample durations from the end of the track; stop once
// dropping the next sample would overshoot the remaining difference
for (sampleDuration in track.sampleDurations.toList().asReversed()) {
if (sampleDuration.toDouble() / track.trackMetaData.timescale > diff) {
break
}
diff -= sampleDuration.toDouble() / track.trackMetaData.timescale
durationOperator -= sampleDuration.toDouble() / track.trackMetaData.timescale
counter++
}
if (counter != 0L) {
// Crop the track, dropping the counted samples from its end
track = CroppedTrack(track, 0, track.samples.size - counter)
// Update the original reference
tracks.removeAt(tracks.lastIndex)
tracks.add(track)
}
// Returning durations
return if (isAudioProblem) {
Durations(durationOperator, videoDuration)
} else {
Durations(audioDuration, durationOperator)
}
}
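A note on the design choice: this crops the longer track rather than padding the shorter one, since mp4parser can drop whole samples without re-encoding, whereas appending silence would mean synthesizing and encoding extra AAC frames.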
Upvotes: 0
Reputation: 2468
In many cases the recordings have different audio and video lengths. Let's say the audio track is always 10.0 s and the video track is always 10.1 s. If you just play one movie like that, the audio simply ends before the video and the remainder is effectively silent.
But if you append two of these videos, the first audio segment starts at 0 s and the second at 10.0 s, while the second video segment starts at 10.1 s, and voilà: you have a sync problem, and it grows with every appended clip.
You will need to compensate for the different running lengths, either by appending silence to the shorter track or by dropping some frames from the longer one!
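A sketch of the dropping-frames variant, using mp4parser's CroppedTrack (videoTrack and audioTrack are illustrative, and a constant frame rate is assumed):
// Trim the longer video track to the audio length before appending
double audioSec = (double) audioTrack.getDuration() / audioTrack.getTrackMetaData().getTimescale();
double videoSec = (double) videoTrack.getDuration() / videoTrack.getTrackMetaData().getTimescale();
if (videoSec > audioSec) {
// keep only the fraction of samples that fits into the audio duration
long toSample = Math.round(videoTrack.getSamples().size() * (audioSec / videoSec));
videoTrack = new CroppedTrack(videoTrack, 0, toSample);
}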
Upvotes: 2