Methods Summary |
---|
public com.coremedia.iso.IsoFile | build(com.googlecode.mp4parser.authoring.Movie movie){@inheritDoc}
LOG.fine("Creating movie " + movie);
IsoFile isoFile = new IsoFile();
isoFile.addBox(createFtyp(movie));
isoFile.addBox(createMoov(movie));
for (Box box : createMoofMdat(movie)) {
isoFile.addBox(box);
}
isoFile.addBox(createMfra(movie, isoFile));
return isoFile;
|
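For orientation, here is a minimal usage sketch of the build() entry point. It assumes this summary describes mp4parser's FragmentedMp4Builder and that a TwoSecondIntersectionFinder implementation of FragmentIntersectionFinder is available (both names are assumptions, not stated in this summary); the Movie is assumed to be assembled elsewhere, for example via MovieCreator.

```java
import java.io.FileOutputStream;
import java.io.IOException;

import com.coremedia.iso.IsoFile;
import com.googlecode.mp4parser.authoring.Movie;
// assumed package/class names for the builder and the intersection finder
import com.googlecode.mp4parser.authoring.builder.FragmentedMp4Builder;
import com.googlecode.mp4parser.authoring.builder.TwoSecondIntersectionFinder;

public class FragmentedMp4WriteExample {

    // movie: a fully populated Movie (all tracks added), e.g. obtained from MovieCreator
    public static void write(Movie movie, String outputFile) throws IOException {
        FragmentedMp4Builder builder = new FragmentedMp4Builder();
        builder.setIntersectionFinder(new TwoSecondIntersectionFinder()); // assumed finder: fragments of roughly two seconds
        IsoFile isoFile = builder.build(movie);                           // ftyp, moov, moof/mdat pairs, mfra

        FileOutputStream fos = new FileOutputStream(outputFile);
        isoFile.getBox(fos.getChannel()); // serialize all boxes to the file
        fos.close();
    }
}
```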
protected DataInformationBox | createDinf(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
DataInformationBox dinf = new DataInformationBox();
DataReferenceBox dref = new DataReferenceBox();
dinf.addBox(dref);
DataEntryUrlBox url = new DataEntryUrlBox();
url.setFlags(1);
dref.addBox(url);
return dinf;
|
public Box | createFtyp(com.googlecode.mp4parser.authoring.Movie movie)
List<String> minorBrands = new LinkedList<String>();
minorBrands.add("isom");
minorBrands.add("iso2");
minorBrands.add("avc1");
return new FileTypeBox("isom", 0, minorBrands);
|
protected Box | createMdat(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int i)
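// lightweight mdat implementation: the size is computed on demand from the fragment's samples and the payload is streamed straight from the sample buffers when the box is written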
class Mdat implements Box {
ContainerBox parent;
public ContainerBox getParent() {
return parent;
}
public void setParent(ContainerBox parent) {
this.parent = parent;
}
public long getSize() {
long size = 8; // 8 byte box header; fragments larger than 2 GiB (which would need a 64-bit size) are not expected
for (ByteBuffer sample : getSamples(startSample, endSample, track, i)) {
size += sample.remaining();
}
return size;
}
public String getType() {
return "mdat";
}
public void getBox(WritableByteChannel writableByteChannel) throws IOException {
ByteBuffer header = ByteBuffer.allocate(8);
IsoTypeWriter.writeUInt32(header, l2i(getSize()));
header.put(IsoFile.fourCCtoBytes(getType()));
header.rewind();
writableByteChannel.write(header);
List<ByteBuffer> samples = getSamples(startSample, endSample, track, i);
for (ByteBuffer sample : samples) {
writableByteChannel.write(sample);
}
}
public void parse(ReadableByteChannel readableByteChannel, ByteBuffer header, long contentSize, BoxParser boxParser) throws IOException {
}
}
return new Mdat();
|
protected Box | createMdhd(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
MediaHeaderBox mdhd = new MediaHeaderBox();
mdhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
mdhd.setDuration(getDuration(track));
mdhd.setTimescale(track.getTrackMetaData().getTimescale());
mdhd.setLanguage(track.getTrackMetaData().getLanguage());
return mdhd;
|
protected Box | createMdia(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie movie)
MediaBox mdia = new MediaBox();
mdia.addBox(createMdhd(movie, track));
mdia.addBox(createMdiaHdlr(track, movie));
mdia.addBox(createMinf(track, movie));
return mdia;
|
protected Box | createMdiaHdlr(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie movie)
HandlerBox hdlr = new HandlerBox();
hdlr.setHandlerType(track.getHandler());
return hdlr;
|
protected Box | createMfhd(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)
MovieFragmentHeaderBox mfhd = new MovieFragmentHeaderBox();
mfhd.setSequenceNumber(sequenceNumber);
return mfhd;
|
protected Box | createMfra(com.googlecode.mp4parser.authoring.Movie movie, com.coremedia.iso.IsoFile isoFile)Creates a 'mfra' - movie fragment random access box for the given movie in the given
isofile. Uses {@link #createTfra(com.googlecode.mp4parser.authoring.Track, com.coremedia.iso.IsoFile)}
to generate the child boxes.
MovieFragmentRandomAccessBox mfra = new MovieFragmentRandomAccessBox();
for (Track track : movie.getTracks()) {
mfra.addBox(createTfra(track, isoFile));
}
MovieFragmentRandomAccessOffsetBox mfro = new MovieFragmentRandomAccessOffsetBox();
mfra.addBox(mfro);
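// mfra_size covers the complete enclosing mfra box, so it is computed only after the mfro has been added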
mfro.setMfraSize(mfra.getSize());
return mfra;
|
protected Box | createMinf(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie movie)
MediaInformationBox minf = new MediaInformationBox();
minf.addBox(track.getMediaHeaderBox());
minf.addBox(createDinf(movie, track));
minf.addBox(createStbl(movie, track));
return minf;
|
protected Box | createMoof(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)Creates a 'moof' box for a given sequence of samples.
MovieFragmentBox moof = new MovieFragmentBox();
moof.addBox(createMfhd(startSample, endSample, track, sequenceNumber));
moof.addBox(createTraf(startSample, endSample, track, sequenceNumber));
TrackRunBox firstTrun = moof.getTrackRunBoxes().get(0);
firstTrun.setDataOffset(1); // dummy value so the optional data-offset field is counted in moof.getSize()
firstTrun.setDataOffset((int) (8 + moof.getSize())); // real offset: moof size plus the 8 byte mdat header
return moof;
|
protected java.util.List | createMoofMdat(com.googlecode.mp4parser.authoring.Movie movie)
List<Box> boxes = new LinkedList<Box>();
HashMap<Track, long[]> intersectionMap = new HashMap<Track, long[]>();
int maxNumberOfFragments = 0;
for (Track track : movie.getTracks()) {
long[] intersects = intersectionFinder.sampleNumbers(track, movie);
intersectionMap.put(track, intersects);
maxNumberOfFragments = Math.max(maxNumberOfFragments, intersects.length);
}
int sequence = 1;
// two counters: 'cycle' walks the fragments of each track, 'sequence' numbers the moof/mdat pairs across all tracks
for (int cycle = 0; cycle < maxNumberOfFragments; cycle++) {
final List<Track> sortedTracks = sortTracksInSequence(movie.getTracks(), cycle, intersectionMap);
for (Track track : sortedTracks) {
if (getAllowedHandlers().isEmpty() || getAllowedHandlers().contains(track.getHandler())) {
long[] startSamples = intersectionMap.get(track);
//some tracks may have fewer fragments -> skip them
if (cycle < startSamples.length) {
long startSample = startSamples[cycle];
// one based sample numbers - the first sample is 1
long endSample = cycle + 1 < startSamples.length ? startSamples[cycle + 1] : track.getSamples().size() + 1;
// if startSample == endSample the cycle is empty!
if (startSample != endSample) {
boxes.add(createMoof(startSample, endSample, track, sequence));
boxes.add(createMdat(startSample, endSample, track, sequence++));
}
}
}
}
}
return boxes;
|
protected Box | createMoov(com.googlecode.mp4parser.authoring.Movie movie)Creates a fully populated 'moov' box with all child boxes. Child boxes are:
- {@link #createMvhd(com.googlecode.mp4parser.authoring.Movie) mvhd}
- {@link #createMvex(com.googlecode.mp4parser.authoring.Movie) mvex}
- a {@link #createTrak(com.googlecode.mp4parser.authoring.Track, com.googlecode.mp4parser.authoring.Movie) trak} for every track
MovieBox movieBox = new MovieBox();
movieBox.addBox(createMvhd(movie));
movieBox.addBox(createMvex(movie));
for (Track track : movie.getTracks()) {
movieBox.addBox(createTrak(track, movie));
}
// metadata boxes could be added here
return movieBox;
|
protected Box | createMvex(com.googlecode.mp4parser.authoring.Movie movie)Creates a 'mvex' - movie extends box and populates it with one 'trex' box per track,
generated by calling {@link #createTrex(com.googlecode.mp4parser.authoring.Movie, com.googlecode.mp4parser.authoring.Track)}
for each track.
MovieExtendsBox mvex = new MovieExtendsBox();
final MovieExtendsHeaderBox mved = new MovieExtendsHeaderBox();
for (Track track : movie.getTracks()) {
final long trackDuration = getTrackDuration(movie, track);
if (mved.getFragmentDuration() < trackDuration) {
mved.setFragmentDuration(trackDuration);
}
}
mvex.addBox(mved);
for (Track track : movie.getTracks()) {
mvex.addBox(createTrex(movie, track));
}
return mvex;
|
protected Box | createMvhd(com.googlecode.mp4parser.authoring.Movie movie)Creates a single 'mvhd' movie header box for a given movie.
MovieHeaderBox mvhd = new MovieHeaderBox();
mvhd.setVersion(1);
mvhd.setCreationTime(DateHelper.convert(new Date()));
mvhd.setModificationTime(DateHelper.convert(new Date()));
long movieTimeScale = movie.getTimescale();
long duration = 0;
for (Track track : movie.getTracks()) {
long tracksDuration = getDuration(track) * movieTimeScale / track.getTrackMetaData().getTimescale();
if (tracksDuration > duration) {
duration = tracksDuration;
}
}
mvhd.setDuration(duration);
mvhd.setTimescale(movieTimeScale);
// find the next available trackId
long nextTrackId = 0;
for (Track track : movie.getTracks()) {
nextTrackId = nextTrackId < track.getTrackMetaData().getTrackId() ? track.getTrackMetaData().getTrackId() : nextTrackId;
}
mvhd.setNextTrackId(++nextTrackId);
return mvhd;
|
protected Box | createStbl(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
SampleTableBox stbl = new SampleTableBox();
stbl.addBox(track.getSampleDescriptionBox());
stbl.addBox(new TimeToSampleBox());
//stbl.addBox(new SampleToChunkBox());
stbl.addBox(new StaticChunkOffsetBox());
return stbl;
|
protected Box | createTfhd(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)
TrackFragmentHeaderBox tfhd = new TrackFragmentHeaderBox();
SampleFlags sf = new SampleFlags();
tfhd.setDefaultSampleFlags(sf);
tfhd.setBaseDataOffset(-1);
tfhd.setTrackId(track.getTrackMetaData().getTrackId());
return tfhd;
|
protected Box | createTfra(com.googlecode.mp4parser.authoring.Track track, com.coremedia.iso.IsoFile isoFile)Creates a 'tfra' - track fragment random access box for the given track within the given
isoFile. The tfra contains a map of random access points, with the time as key and the byte offset
within the isoFile as value.
TrackFragmentRandomAccessBox tfra = new TrackFragmentRandomAccessBox();
tfra.setVersion(1); // use long offsets and times
List<TrackFragmentRandomAccessBox.Entry> offset2timeEntries = new LinkedList<TrackFragmentRandomAccessBox.Entry>();
List<Box> boxes = isoFile.getBoxes();
long offset = 0;
long duration = 0;
for (Box box : boxes) {
if (box instanceof MovieFragmentBox) {
List<TrackFragmentBox> trafs = ((MovieFragmentBox) box).getBoxes(TrackFragmentBox.class);
for (int i = 0; i < trafs.size(); i++) {
TrackFragmentBox traf = trafs.get(i);
if (traf.getTrackFragmentHeaderBox().getTrackId() == track.getTrackMetaData().getTrackId()) {
// here we are at the offset required for the current entry.
List<TrackRunBox> truns = traf.getBoxes(TrackRunBox.class);
for (int j = 0; j < truns.size(); j++) {
List<TrackFragmentRandomAccessBox.Entry> offset2timeEntriesThisTrun = new LinkedList<TrackFragmentRandomAccessBox.Entry>();
TrackRunBox trun = truns.get(j);
for (int k = 0; k < trun.getEntries().size(); k++) {
TrackRunBox.Entry trunEntry = trun.getEntries().get(k);
SampleFlags sf = null;
if (k == 0 && trun.isFirstSampleFlagsPresent()) {
sf = trun.getFirstSampleFlags();
} else if (trun.isSampleFlagsPresent()) {
sf = trunEntry.getSampleFlags();
} else {
List<MovieExtendsBox> mvexs = isoFile.getMovieBox().getBoxes(MovieExtendsBox.class);
for (MovieExtendsBox mvex : mvexs) {
List<TrackExtendsBox> trexs = mvex.getBoxes(TrackExtendsBox.class);
for (TrackExtendsBox trex : trexs) {
if (trex.getTrackId() == track.getTrackMetaData().getTrackId()) {
sf = trex.getDefaultSampleFlags();
}
}
}
}
if (sf == null) {
throw new RuntimeException("Could not find any SampleFlags to indicate random access or not");
}
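// sample_depends_on == 2 means the sample does not depend on others (an I-picture), i.e. it is a random access point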
if (sf.getSampleDependsOn() == 2) {
offset2timeEntriesThisTrun.add(new TrackFragmentRandomAccessBox.Entry(
duration,
offset,
i + 1, j + 1, k + 1));
}
duration += trunEntry.getSampleDuration();
}
if (offset2timeEntriesThisTrun.size() == trun.getEntries().size() && trun.getEntries().size() > 0) {
// every sample in this trun is a random access sample (typical for audio tracks),
// so registering them all would bloat the tfra; use only the first sample of the trun
offset2timeEntries.add(offset2timeEntriesThisTrun.get(0));
} else {
offset2timeEntries.addAll(offset2timeEntriesThisTrun);
}
}
}
}
}
offset += box.getSize();
}
tfra.setEntries(offset2timeEntries);
tfra.setTrackId(track.getTrackMetaData().getTrackId());
return tfra;
|
protected Box | createTkhd(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
TrackHeaderBox tkhd = new TrackHeaderBox();
tkhd.setVersion(1);
int flags = 0;
if (track.isEnabled()) {
flags += 1;
}
if (track.isInMovie()) {
flags += 2;
}
if (track.isInPreview()) {
flags += 4;
}
if (track.isInPoster()) {
flags += 8;
}
tkhd.setFlags(flags);
tkhd.setAlternateGroup(track.getTrackMetaData().getGroup());
tkhd.setCreationTime(DateHelper.convert(track.getTrackMetaData().getCreationTime()));
// We need to take edit list box into account in trackheader duration
// but as long as I don't support edit list boxes it is sufficient to
// just translate media duration to movie timescale
tkhd.setDuration(getTrackDuration(movie, track));
tkhd.setHeight(track.getTrackMetaData().getHeight());
tkhd.setWidth(track.getTrackMetaData().getWidth());
tkhd.setLayer(track.getTrackMetaData().getLayer());
tkhd.setModificationTime(DateHelper.convert(new Date()));
tkhd.setTrackId(track.getTrackMetaData().getTrackId());
tkhd.setVolume(track.getTrackMetaData().getVolume());
return tkhd;
|
protected Box | createTraf(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)
TrackFragmentBox traf = new TrackFragmentBox();
traf.addBox(createTfhd(startSample, endSample, track, sequenceNumber));
for (Box trun : createTruns(startSample, endSample, track, sequenceNumber)) {
traf.addBox(trun);
}
return traf;
|
protected Box | createTrak(com.googlecode.mp4parser.authoring.Track track, com.googlecode.mp4parser.authoring.Movie movie)
LOG.fine("Creating Track " + track);
TrackBox trackBox = new TrackBox();
trackBox.addBox(createTkhd(movie, track));
trackBox.addBox(createMdia(track, movie));
return trackBox;
|
protected Box | createTrex(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
TrackExtendsBox trex = new TrackExtendsBox();
trex.setTrackId(track.getTrackMetaData().getTrackId());
trex.setDefaultSampleDescriptionIndex(1);
trex.setDefaultSampleDuration(0);
trex.setDefaultSampleSize(0);
SampleFlags sf = new SampleFlags();
if ("soun".equals(track.getHandler())) {
// as far as I know there is no audio encoding
// where the samples are not self-contained.
sf.setSampleDependsOn(2);
sf.setSampleIsDependedOn(2);
}
trex.setDefaultSampleFlags(sf);
return trex;
|
protected java.util.List | createTruns(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)Creates one or more track run boxes for a given sequence.
TrackRunBox trun = new TrackRunBox();
long[] sampleSizes = getSampleSizes(startSample, endSample, track, sequenceNumber);
trun.setSampleDurationPresent(true);
trun.setSampleSizePresent(true);
List<TrackRunBox.Entry> entries = new ArrayList<TrackRunBox.Entry>(l2i(endSample - startSample));
Queue<TimeToSampleBox.Entry> timeQueue = new LinkedList<TimeToSampleBox.Entry>(track.getDecodingTimeEntries());
long left = startSample - 1;
long curEntryLeft = timeQueue.peek().getCount();
while (left > curEntryLeft) {
left -= curEntryLeft;
timeQueue.remove();
curEntryLeft = timeQueue.peek().getCount();
}
curEntryLeft -= left;
Queue<CompositionTimeToSample.Entry> compositionTimeQueue =
track.getCompositionTimeEntries() != null && track.getCompositionTimeEntries().size() > 0 ?
new LinkedList<CompositionTimeToSample.Entry>(track.getCompositionTimeEntries()) : null;
long compositionTimeEntriesLeft = compositionTimeQueue != null ? compositionTimeQueue.peek().getCount() : -1;
trun.setSampleCompositionTimeOffsetPresent(compositionTimeEntriesLeft > 0);
// fast-forward the composition time queue to the first sample of this fragment
for (long i = 1; i < startSample; i++) {
if (compositionTimeQueue != null) {
//trun.setSampleCompositionTimeOffsetPresent(true);
if (--compositionTimeEntriesLeft == 0 && compositionTimeQueue.size() > 1) {
compositionTimeQueue.remove();
compositionTimeEntriesLeft = compositionTimeQueue.element().getCount();
}
}
}
boolean sampleFlagsRequired = (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty() ||
track.getSyncSamples() != null && track.getSyncSamples().length != 0);
trun.setSampleFlagsPresent(sampleFlagsRequired);
for (int i = 0; i < sampleSizes.length; i++) {
TrackRunBox.Entry entry = new TrackRunBox.Entry();
entry.setSampleSize(sampleSizes[i]);
if (sampleFlagsRequired) {
SampleFlags sflags = new SampleFlags();
if (track.getSampleDependencies() != null && !track.getSampleDependencies().isEmpty()) {
SampleDependencyTypeBox.Entry e = track.getSampleDependencies().get(i);
sflags.setSampleDependsOn(e.getSampleDependsOn());
sflags.setSampleIsDependedOn(e.getSampleIsDependentOn());
sflags.setSampleHasRedundancy(e.getSampleHasRedundancy());
}
if (track.getSyncSamples() != null && track.getSyncSamples().length > 0) {
// we have to mark non-sync samples!
if (Arrays.binarySearch(track.getSyncSamples(), startSample + i) >= 0) {
sflags.setSampleIsDifferenceSample(false);
sflags.setSampleDependsOn(2);
} else {
sflags.setSampleIsDifferenceSample(true);
sflags.setSampleDependsOn(1);
}
}
// no sample degradation priority information is available
entry.setSampleFlags(sflags);
}
entry.setSampleDuration(timeQueue.peek().getDelta());
if (--curEntryLeft == 0 && timeQueue.size() > 1) {
timeQueue.remove();
curEntryLeft = timeQueue.peek().getCount();
}
if (compositionTimeQueue != null) {
entry.setSampleCompositionTimeOffset(compositionTimeQueue.peek().getOffset());
if (--compositionTimeEntriesLeft == 0 && compositionTimeQueue.size() > 1) {
compositionTimeQueue.remove();
compositionTimeEntriesLeft = compositionTimeQueue.element().getCount();
}
}
entries.add(entry);
}
trun.setEntries(entries);
return Collections.singletonList(trun);
|
public java.util.List | getAllowedHandlers()
return Arrays.asList("soun", "vide");
|
protected long | getDuration(com.googlecode.mp4parser.authoring.Track track)
long duration = 0;
for (TimeToSampleBox.Entry entry : track.getDecodingTimeEntries()) {
duration += entry.getCount() * entry.getDelta();
}
return duration;
|
public FragmentIntersectionFinder | getFragmentIntersectionFinder()
return intersectionFinder;
|
protected long[] | getSampleSizes(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)Gets the sizes of a sequence of samples.
List<ByteBuffer> samples = getSamples(startSample, endSample, track, sequenceNumber);
long[] sampleSizes = new long[samples.size()];
for (int i = 0; i < sampleSizes.length; i++) {
sampleSizes[i] = samples.get(i).remaining();
}
return sampleSizes;
|
protected java.util.List | getSamples(long startSample, long endSample, com.googlecode.mp4parser.authoring.Track track, int sequenceNumber)Gets all samples starting with startSample (one-based, so one is the first) and
ending with endSample (exclusive); e.g. startSample=1, endSample=3 yields samples 1 and 2.
// startSample and endSample are one-based, so subtract 1 before addressing list elements
return track.getSamples().subList(l2i(startSample) - 1, l2i(endSample) - 1);
|
private long | getTrackDuration(com.googlecode.mp4parser.authoring.Movie movie, com.googlecode.mp4parser.authoring.Track track)
return getDuration(track) * movie.getTimescale() / track.getTrackMetaData().getTimescale();
|
public void | setIntersectionFinder(FragmentIntersectionFinder intersectionFinder)
this.intersectionFinder = intersectionFinder;
|
protected java.util.List | sortTracksInSequence(java.util.List tracks, int cycle, java.util.Map intersectionMap)Some formats require the fragments to be sorted. E.g. UltraViolet CFF files must contain
the fragments of each cycle in ascending size order. If
- video[1].getBytes().length < audio[1].getBytes().length < subs[1].getBytes().length
- audio[2].getBytes().length < video[2].getBytes().length < subs[2].getBytes().length
the resulting fragment order is:
- video[1]
- audio[1]
- subs[1]
- audio[2]
- video[2]
- subs[2]
tracks = new LinkedList<Track>(tracks);
Collections.sort(tracks, new Comparator<Track>() {
public int compare(Track o1, Track o2) {
long[] startSamples1 = intersectionMap.get(o1);
long startSample1 = startSamples1[cycle];
// one based sample numbers - the first sample is 1
long endSample1 = cycle + 1 < startSamples1.length ? startSamples1[cycle + 1] : o1.getSamples().size() + 1;
long[] startSamples2 = intersectionMap.get(o2);
long startSample2 = startSamples2[cycle];
// one based sample numbers - the first sample is 1
long endSample2 = cycle + 1 < startSamples2.length ? startSamples2[cycle + 1] : o2.getSamples().size() + 1;
List<ByteBuffer> samples1 = o1.getSamples().subList(l2i(startSample1) - 1, l2i(endSample1) - 1);
List<ByteBuffer> samples2 = o2.getSamples().subList(l2i(startSample2) - 1, l2i(endSample2) - 1);
int size1 = 0;
for (ByteBuffer byteBuffer : samples1) {
size1 += byteBuffer.remaining();
}
int size2 = 0;
for (ByteBuffer byteBuffer : samples2) {
size2 += byteBuffer.remaining();
}
return size1 - size2;
}
});
return tracks;
|