File: SyncSampleIntersectFinderImpl.java
Doc: API Doc
Category: mp4parser 1.0-RC-17
Size: 14915
Date: Wed Dec 19 20:10:37 GMT 2012
Package: com.googlecode.mp4parser.authoring.builder

SyncSampleIntersectFinderImpl

public class SyncSampleIntersectFinderImpl extends Object implements FragmentIntersectionFinder
This FragmentIntersectionFinder cuts the input movie's video tracks into fragments of the same length, each starting exactly at a sync sample. Audio tracks are cut into pieces of similar length.
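
A minimal usage sketch (hedged: the Movie instance and its tracks are assumed to come from the mp4parser authoring API; only sampleNumbers is taken from this class):

        // Hypothetical setup: "movie" is a com.googlecode.mp4parser.authoring.Movie
        // obtained elsewhere. Fragments will be no shorter than 2 seconds.
        FragmentIntersectionFinder finder = new SyncSampleIntersectFinderImpl(2);
        for (Track track : movie.getTracks()) {
            long[] fragmentStartSamples = finder.sampleNumbers(track, movie); // 1-based
        }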

Fields Summary
private static Logger LOG
private static Map getTimesCache
private static Map getSampleNumbersCache
private final int minFragmentDurationSeconds
Constructors Summary
public SyncSampleIntersectFinderImpl()
Creates a SyncSampleIntersectFinderImpl with no minimum fragment duration; fragments are cut at every common sync sample.

        minFragmentDurationSeconds = 0;
    
public SyncSampleIntersectFinderImpl(int minFragmentDurationSeconds)
Creates a SyncSampleIntersectFinderImpl that will not create any fragment smaller than the given minFragmentDurationSeconds.

param
minFragmentDurationSeconds the smallest allowable duration of a fragment, in seconds.

        this.minFragmentDurationSeconds = minFragmentDurationSeconds;
    
Methods Summary
private static long calculateTracktimesScalingFactor(com.googlecode.mp4parser.authoring.Movie m, com.googlecode.mp4parser.authoring.Track track)

        // Find a common timescale for all tracks with the same handler:
        // the lcm of every timescale that differs from this track's.
        long timeScale = 1;
        for (Track track1 : m.getTracks()) {
            if (track1.getHandler().equals(track.getHandler())) {
                if (track1.getTrackMetaData().getTimescale() != track.getTrackMetaData().getTimescale()) {
                    timeScale = lcm(timeScale, track1.getTrackMetaData().getTimescale());
                }
            }
        }
        return timeScale;
    
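
The returned factor is the least common multiple (lcm) of the differing timescales, so timestamps from all same-handler tracks can be compared on one common scale. A small worked example with hypothetical timescales:

        // Hypothetical numbers: a 90000 Hz video timescale vs. a 44100 Hz one.
        // lcm(90000, 44100) = 4410000, so one second is 4410000 common ticks
        // in both tracks:
        long commonScale = 4410000L;
        long oneSecondVideo = 90000L * (commonScale / 90000L);  // 4410000
        long oneSecondAudio = 44100L * (commonScale / 44100L);  // 4410000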
public long[] getCommonIndices(long[] syncSamples, long[] syncSampleTimes, long timeScale, long[][] otherTracksTimes)

        List<Long> nuSyncSamples = new LinkedList<Long>();
        List<Long> nuSyncSampleTimes = new LinkedList<Long>();


        for (int i = 0; i < syncSampleTimes.length; i++) {
            boolean foundInEveryRef = true;
            for (long[] times : otherTracksTimes) {
                foundInEveryRef &= (Arrays.binarySearch(times, syncSampleTimes[i]) >= 0);
            }

            if (foundInEveryRef) {
                // use sample only if found in every other track.
                nuSyncSamples.add(syncSamples[i]);
                nuSyncSampleTimes.add(syncSampleTimes[i]);
            }
        }
        // We have two lists now:
        // nuSyncSamples: contains all common sync samples
        // nuSyncSampleTimes: contains the times of those common sync samples

        // Start: Warn user if samples are not matching!
        if (nuSyncSamples.size() < (syncSamples.length * 0.25)) {
            String log = "";
            log += String.format("%5d - Common:  [", nuSyncSamples.size());
            for (long l : nuSyncSamples) {
                log += (String.format("%10d,", l));
            }
            log += ("]");
            LOG.warning(log);
            log = "";

            log += String.format("%5d - In    :  [", syncSamples.length);
            for (long l : syncSamples) {
                log += (String.format("%10d,", l));
            }
            log += ("]");
            LOG.warning(log);
            LOG.warning("There are less than 25% of common sync samples in the given track.");
            throw new RuntimeException("There are less than 25% of common sync samples in the given track.");
        } else if (nuSyncSamples.size() < (syncSamples.length * 0.5)) {
            LOG.fine("There are less than 50% of common sync samples in the given track. This is implausible but I'm ok to continue.");
        } else if (nuSyncSamples.size() < syncSamples.length) {
            LOG.finest("Common SyncSample positions vs. this tracks SyncSample positions: " + nuSyncSamples.size() + " vs. " + syncSamples.length);
        }
        // End: Warn user if samples are not matching!




        List<Long> finalSampleList = new LinkedList<Long>();

        if (minFragmentDurationSeconds > 0) {
            // if minFragmentDurationSeconds is greater than 0
            // we need to throw away certain samples.
            long lastSyncSampleTime = -1;
            Iterator<Long> nuSyncSamplesIterator = nuSyncSamples.iterator();
            Iterator<Long> nuSyncSampleTimesIterator = nuSyncSampleTimes.iterator();
            while (nuSyncSamplesIterator.hasNext() && nuSyncSampleTimesIterator.hasNext()) {
                long curSyncSample = nuSyncSamplesIterator.next();
                long curSyncSampleTime = nuSyncSampleTimesIterator.next();
                if (lastSyncSampleTime == -1 || (curSyncSampleTime - lastSyncSampleTime) / timeScale >= minFragmentDurationSeconds) {
                    finalSampleList.add(curSyncSample);
                    lastSyncSampleTime = curSyncSampleTime;
                }
            }
        } else {
            // the list of all samples is the final list of samples
            // since minFragmentDurationSeconds is not used.
            finalSampleList = nuSyncSamples;
        }


        // transform the list to an array
        long[] finalSampleArray = new long[finalSampleList.size()];
        for (int i = 0; i < finalSampleArray.length; i++) {
            finalSampleArray[i] = finalSampleList.get(i);
        }
        return finalSampleArray;

    
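A hedged worked example with hypothetical numbers: with no minimum fragment duration, exactly those sync samples survive whose timestamps occur in every other track.

        SyncSampleIntersectFinderImpl finder = new SyncSampleIntersectFinderImpl();
        long[] syncSamples     = {1, 5, 9};
        long[] syncSampleTimes = {0, 40000, 80000};
        long[][] otherTracksTimes = {{0, 80000}}; // the other track lacks t=40000
        long[] common = finder.getCommonIndices(syncSamples, syncSampleTimes, 90000, otherTracksTimes);
        // common == {1, 9}: sample 5 is dropped because no other track has a
        // sync point at time 40000
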
public static java.util.List<long[]> getSyncSamplesTimestamps(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
Calculates the timestamps of all sync samples for every track in the movie that shares the given track's handler.

param
movie the movie containing the tracks to inspect
param
track the track whose handler selects the tracks to inspect
return
a list of sync-sample timestamp arrays, one per matching track that has sync samples

        List<long[]> times = new LinkedList<long[]>();
        for (Track currentTrack : movie.getTracks()) {
            if (currentTrack.getHandler().equals(track.getHandler())) {
                long[] currentTrackSyncSamples = currentTrack.getSyncSamples();
                if (currentTrackSyncSamples != null && currentTrackSyncSamples.length > 0) {
                    final long[] currentTrackTimes = getTimes(currentTrack, movie);
                    times.add(currentTrackTimes);
                }
            }
        }
        return times;
    
private static long[] getTimes(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie m)

        final CacheTuple key = new CacheTuple(track, m);
        final long[] result = getTimesCache.get(key);
        if (result != null) {
            return result;
        }

        long[] syncSamples = track.getSyncSamples();
        long[] syncSampleTimes = new long[syncSamples.length];
        Queue<TimeToSampleBox.Entry> timeQueue = new LinkedList<TimeToSampleBox.Entry>(track.getDecodingTimeEntries());

        int currentSample = 1;  // sample numbers are 1-based: the first sample is sample 1
        long currentDuration = 0;
        long currentDelta = 0;
        int currentSyncSampleIndex = 0;
        long left = 0;

        final long scalingFactor = calculateTracktimesScalingFactor(m, track);

        while (currentSample <= syncSamples[syncSamples.length - 1]) {
            if (currentSample++ == syncSamples[currentSyncSampleIndex]) {
                syncSampleTimes[currentSyncSampleIndex++] = currentDuration * scalingFactor;
            }
            if (left-- == 0) {
                TimeToSampleBox.Entry entry = timeQueue.poll();
                left = entry.getCount() - 1;
                currentDelta = entry.getDelta();
            }
            currentDuration += currentDelta;
        }
        getTimesCache.put(key, syncSampleTimes);
        return syncSampleTimes;
    
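
The loop above walks the track's stts (decoding time to sample) entries to turn 1-based sample numbers into start times. A standalone sketch of that walk, using plain arrays instead of the box classes:

        // Two hypothetical stts entries: 2 samples with delta 100, then
        // 1 sample with delta 50. Printed start times: 0, 100, 200.
        long[] counts = {2, 1};
        long[] deltas = {100, 50};
        long time = 0;
        int sample = 1;
        for (int e = 0; e < counts.length; e++) {
            for (long c = 0; c < counts[e]; c++) {
                System.out.println("sample " + sample++ + " starts at " + time);
                time += deltas[e];
            }
        }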
public long[] sampleNumbers(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie movie)
Gets an array of sample numbers that are meant to be the first sample of each chunk or fragment.

param
track the track concerned
param
movie the context of the track
return
an array containing the ordinal of each fragment's first sample

        final CacheTuple key = new CacheTuple(track, movie);
        final long[] result = getSampleNumbersCache.get(key);
        if (result != null) {
            return result;
        }

        if ("vide".equals(track.getHandler())) {
            if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
                List<long[]> times = getSyncSamplesTimestamps(movie, track);
                final long[] commonIndices = getCommonIndices(track.getSyncSamples(), getTimes(track, movie), track.getTrackMetaData().getTimescale(), times.toArray(new long[times.size()][]));
                getSampleNumbersCache.put(key, commonIndices);
                return commonIndices;
            } else {
                throw new RuntimeException("Video Tracks need sync samples. Only tracks other than video may have no sync samples.");
            }
        } else if ("soun".equals(track.getHandler())) {
            Track referenceTrack = null;
            for (Track candidate : movie.getTracks()) {
                if (candidate.getSyncSamples() != null && "vide".equals(candidate.getHandler()) && candidate.getSyncSamples().length > 0) {
                    referenceTrack = candidate;
                }
            }
            if (referenceTrack != null) {

                // Gets the reference track's fragment start samples.
                long[] refSyncSamples = sampleNumbers(referenceTrack, movie);

                int refSampleCount = referenceTrack.getSamples().size();

                long[] syncSamples = new long[refSyncSamples.length];
                long minSampleRate = 192000; // upper bound; we search for the audio track with the lowest sample rate
                for (Track testTrack : movie.getTracks()) {
                    if ("soun".equals(testTrack.getHandler())) {
                        AudioSampleEntry ase = (AudioSampleEntry) testTrack.getSampleDescriptionBox().getSampleEntry();
                        if (ase.getSampleRate() < minSampleRate) {
                            minSampleRate = ase.getSampleRate();
                            long sc = testTrack.getSamples().size();
                            double stretch = (double) sc / refSampleCount;
                            TimeToSampleBox.Entry sttsEntry = testTrack.getDecodingTimeEntries().get(0);
                            long samplesPerFrame = sttsEntry.getDelta(); // Assuming all audio tracks have the same number of samples per frame, which they do for all known types

                            for (int i = 0; i < syncSamples.length; i++) {
                                long start = (long) Math.ceil(stretch * (refSyncSamples[i] - 1) * samplesPerFrame);
                                syncSamples[i] = start;
                                // The stretch makes sure that there are as many audio chunks as video chunks!
                            }
                            break;
                        }
                    }
                }
                AudioSampleEntry ase = (AudioSampleEntry) track.getSampleDescriptionBox().getSampleEntry();
                TimeToSampleBox.Entry sttsEntry = track.getDecodingTimeEntries().get(0);
                long samplesPerFrame = sttsEntry.getDelta(); // Assuming all audio tracks have the same number of samples per frame, which they do for all known types
                double factor = (double) ase.getSampleRate() / (double) minSampleRate;
                if (factor != Math.rint(factor)) { // Not an integer
                    throw new RuntimeException("Sample rates must be a multiple of the lowest sample rate to create a correct file!");
                }
                for (int i = 0; i < syncSamples.length; i++) {
                    syncSamples[i] = (long) (1 + syncSamples[i] * factor / (double) samplesPerFrame);
                }
                getSampleNumbersCache.put(key, syncSamples);
                return syncSamples;
            }
            throw new RuntimeException("There was absolutely no Track with sync samples. I can't work with that!");
        } else {
            // Ok, my track has no sync samples - let's find one with sync samples.
            for (Track candidate : movie.getTracks()) {
                if (candidate.getSyncSamples() != null && candidate.getSyncSamples().length > 0) {
                    long[] refSyncSamples = sampleNumbers(candidate, movie);
                    int refSampleCount = candidate.getSamples().size();

                    long[] syncSamples = new long[refSyncSamples.length];
                    long sc = track.getSamples().size();
                    double stretch = (double) sc / refSampleCount;

                    for (int i = 0; i < syncSamples.length; i++) {
                        long start = (long) Math.ceil(stretch * (refSyncSamples[i] - 1)) + 1;
                        syncSamples[i] = start;
                        // The stretch makes sure that there are as many chunks in this track as in the reference track!
                    }
                    getSampleNumbersCache.put(key, syncSamples);
                    return syncSamples;
                }
            }
            throw new RuntimeException("There was absolutely no Track with sync samples. I can't work with that!");
        }
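
To make the audio mapping concrete, a hedged arithmetic sketch (all values hypothetical): a 30 fps video with 300 samples fragments at sample 151 (t = 5 s); the lowest-rate audio track is 44100 Hz AAC with 431 frames of 1024 samples each.

        double stretch = 431.0 / 300.0;               // audio frames per video frame
        long pcmStart = (long) Math.ceil(stretch * (151 - 1) * 1024); // 220672 PCM samples
        long audioFrame = (long) (1 + pcmStart * 1.0 / 1024);         // frame 216
        // Frame 216 starts at 215 * 1024 / 44100 ~ 4.99 s, right at the 5 s
        // video fragment boundary.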