File:     AviParser.java
Doc:      API Doc
Category: JMF 2.1.1e
Size:     34326
Date:     Mon May 12 12:20:52 BST 2003
Package:  com.sun.media.parser.video

AviParser

public class AviParser extends BasicPullParser

Fields Summary
private static ContentDescriptor[] supportedFormat
private PullSourceStream stream
private CachedStream cacheStream
private Track[] tracks
private Seekable seekableStream
private int numSupportedTracks
private int length
private int audioTrack
private int videoTrack
private int keyFrameTrack
private static final int SIZE_OF_AVI_INDEX
private static final int AVIH_HEADER_LENGTH
private static final int STRH_HEADER_LENGTH
private static final int STRF_VIDEO_HEADER_LENGTH
private static final int STRF_AUDIO_HEADER_LENGTH
static final int AVIF_HASINDEX
static final int AVIF_MUSTUSEINDEX
static final int AVIF_ISINTERLEAVED
static final int AVIF_WASCAPTUREFILE
static final int AVIF_COPYRIGHTED
static final int AVIF_KEYFRAME
static final String AUDIO
static final String VIDEO
static final String LISTRECORDCHUNK
static final String VIDEO_MAGIC
static final String VIDEO_MAGIC_JPEG
static final String VIDEO_MAGIC_IV32a
static final String VIDEO_MAGIC_IV32b
static final String VIDEO_MAGIC_IV31
static final String VIDEO_MAGIC_CVID
static final String AUDIO_MAGIC
private int usecPerFrame
private long nanoSecPerFrame
private int maxBytesPerSecond
private int paddingGranularity
private int flags
private int totalFrames
private int initialFrames
private int numTracks
private int suggestedBufferSize
private int width
private int height
private TrakList[] trakList
private int idx1MinimumChunkOffset
private int moviOffset
private Time duration
private boolean moviChunkSeen
private boolean idx1ChunkSeen
private int maxAudioChunkIndex
private int maxVideoChunkIndex
private int extraHeaderLength
private byte[] codecSpecificHeader
private Object seekSync
Constructors Summary
Methods Summary
private int[] buildIndexToKeyFrameIndexTable(int[] syncSamples, int numKeyFrames, int numberOfSamples)

	
	int[] syncSampleMapping = new int[numberOfSamples];
	int index = 0;
	int previous;
	if (syncSamples[0] != 0) {
	    // Bug in the sync table of the avi file
	    // The first sample should always be a key frame
	    previous = syncSampleMapping[0] = 0;
	} else {
	    previous = syncSampleMapping[0] = 0;
	    index++;
	}
	
	for (; index < numKeyFrames; index++) {
	    int next = syncSamples[index];
	    for (int j = previous+1; j < next; j++) {
		syncSampleMapping[j] = previous;
	    }

	    syncSampleMapping[next] = next;

	    previous = next;
	}
	int lastSyncFrame = syncSamples[numKeyFrames - 1];
	for (index = lastSyncFrame+1; index < numberOfSamples; index++) {
	    syncSampleMapping[index] = lastSyncFrame;
	}
	return syncSampleMapping;
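For illustration, a minimal standalone sketch of the mapping this method builds, using made-up numbers (10 frames, key frames at 0, 4 and 8); the variable names are illustrative only:

	// Hypothetical sync table: key frames at 0, 4 and 8 out of 10 frames.
	int[] syncSamples = {0, 4, 8};
	int[] mapping = new int[10];
	int previous = 0;
	for (int i = 1; i < syncSamples.length; i++) {
	    int next = syncSamples[i];
	    for (int j = previous + 1; j < next; j++)
		mapping[j] = previous;      // non-key frames point back
	    mapping[next] = next;           // key frames map to themselves
	    previous = next;
	}
	for (int j = previous + 1; j < mapping.length; j++)
	    mapping[j] = previous;          // tail after the last key frame
	// mapping is now {0, 0, 0, 0, 4, 4, 4, 4, 8, 8}: every frame maps to the
	// nearest key frame at or before it, which is what setPosition() uses to
	// snap a seek to a decodable frame.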
    
public javax.media.Time getDuration()

	return duration;
    
private long getLocation()

	return getLocation(stream);
    
public javax.media.Time getMediaTime()

	return null;  // TODO
    
public java.lang.String getName()
Returns a descriptive name for the plug-in. This is a user readable string.

	return "Parser for avi file format";
    
public javax.media.protocol.ContentDescriptor[] getSupportedInputContentDescriptors()

	return supportedFormat;
    
public javax.media.Track[] getTracks()

 	if (tracks != null)
 	    return tracks;
	
	if (seekableStream == null) {
	    return new Track[0];
	}

	readHeader();
 	if (!moviChunkSeen) {
 	    throw new BadHeaderException("No movi chunk");
 	}

	if (!idx1ChunkSeen) {
	    // System.err.println("Currently files with no idx1 chunk are not supported");
 	    throw new BadHeaderException("Currently files with no idx1 chunk are not supported");
	}

	if (numTracks <= 0) {
	    throw new BadHeaderException("Error parsing header");
	}

	tracks = new Track[numTracks];


	// System.out.println("Number of tracks: " + tracks.length);
	for (int i = 0; i < tracks.length; i++) {
	    // System.out.println("Track # " + (i+1));
	    TrakList trakInfo = trakList[i];
	    if (trakInfo.trackType.equals(AUDIO)) {
		tracks[i] = new AudioTrack(trakInfo);
	    } else if (trakInfo.trackType.equals(VIDEO)) {
// 		System.out.println("Number of frames in Video track is " +
// 				   trakInfo.maxChunkIndex);
		tracks[i] = new VideoTrack(trakInfo);
	    }
	}
	return tracks;

    
private boolean isSupported(java.lang.String trackType)

	return ( trackType.equals(VIDEO) || trackType.equals(AUDIO) );
    
private void parseAVIH(int length)

 	try {
	    if (length < AVIH_HEADER_LENGTH) {
		throw new BadHeaderException("avih: header size is not 56");
	    }

	    usecPerFrame = readInt(stream, /* bigEndian = */ false);
	    nanoSecPerFrame = usecPerFrame * 1000;
	    maxBytesPerSecond = readInt(stream, /* bigEndian = */ false);
	    paddingGranularity = readInt(stream, /* bigEndian = */ false);
	    flags = readInt(stream, /* bigEndian = */ false);
	    totalFrames = readInt(stream, /* bigEndian = */ false);
	    initialFrames = readInt(stream, /* bigEndian = */ false);
	    numTracks = readInt(stream, /* bigEndian = */ false);
	    suggestedBufferSize = readInt(stream, /* bigEndian = */ false);
	    width = readInt(stream, /* bigEndian = */ false);
	    height = readInt(stream, /* bigEndian = */ false);
	    skip(stream, 4*4); // int reserved[4]
	    if ( (length - AVIH_HEADER_LENGTH) > 0)
		skip(stream, length - AVIH_HEADER_LENGTH);
 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
    
private void parseHDRL()


	try {
	    String next = readString(stream);
	    if (!next.equals("avih")) {
		throw new BadHeaderException("AVI Parser: expected string AVIH, got "
					 + next);
	    }
	    int headerLength = readInt(stream, /* bigEndian = */ false);
	    parseAVIH(headerLength);
	    trakList = new TrakList[numTracks];
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing hdrl");
	}

    
private void parseIDX1(int length)

	try {
	    if (!moviChunkSeen) {
		throw new BadHeaderException("idx1 chunk appears before movi chunk");
	    }
	    // TODO: check for valid length value
	    int numIndices = (length / SIZE_OF_AVI_INDEX);
	    String id;
	    int flag;
	    int chunkOffset;
	    int chunkLength;

	    for (int i = 0; i < numTracks; i++) {
		if (trakList[i] == null) {
		    throw new BadHeaderException("Bad file format");
		}
		trakList[i].chunkInfo = new AVIIndexEntry[numIndices];
		if (trakList[i].trackType.equals(VIDEO)) {
		    trakList[i].keyFrames = new int[numIndices];
		}
	    }

	    idx1MinimumChunkOffset = Integer.MAX_VALUE;

	    for (int i = 0; i < numIndices; i++) {
		id = readString(stream);
 		if (id.equals(LISTRECORDCHUNK)) {
		    // $$$ DISCARD for now
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
 		    continue;
		}
		int streamNumber;
		try {
		    streamNumber = Integer.parseInt(id.substring(0,2));
		} catch (NumberFormatException e) {
		    // DISCARD chunk as it doesn't represent a stream
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
		    readInt(stream, /* bigEndian = */ false);
 		    continue;
		}

		if ( (streamNumber < 0) || (streamNumber >= numTracks) ) {
		    throw new BadHeaderException("index chunk has illegal stream # " +
						 streamNumber);
		}
 		flag = readInt(stream, /* bigEndian = */ false);
 		chunkOffset = readInt(stream, /* bigEndian = */ false);
 		chunkLength = readInt(stream, /* bigEndian = */ false);
		
		AVIIndexEntry[] chunkInfo = trakList[streamNumber].chunkInfo;
		int index = trakList[streamNumber].maxChunkIndex;
		
		chunkInfo[index] = new AVIIndexEntry();
		chunkInfo[index].id = id;
		chunkInfo[index].flag = flag;
		chunkInfo[index].chunkOffset = chunkOffset;
		chunkInfo[index].chunkLength = chunkLength;
		
		if (trakList[streamNumber].trackType.equals(AUDIO)) {
		    int c = trakList[streamNumber].tmpCumulativeChunkLength += chunkLength;
		    chunkInfo[index].cumulativeChunkLength = c;
		}
		
		if (trakList[streamNumber].trackType.equals(VIDEO)) {
		    if ( (flag & AVIF_KEYFRAME) > 0 ) {
			int keyFrameIndex = trakList[streamNumber].numKeyFrames;
			trakList[streamNumber].keyFrames[keyFrameIndex] = index;
			trakList[streamNumber].numKeyFrames++;
		    }
		}
		trakList[streamNumber].maxChunkIndex++;
		
		if (chunkOffset < idx1MinimumChunkOffset) {
		    idx1MinimumChunkOffset = chunkOffset;
		}
	    }

	    // For video tracks, if all the frames are not key frames,
	    // build the indexToKeyframeIndex table
	    // which maps a video frame to a key frame.
	    for (int i = 0; i < numTracks; i++) {
		if (trakList[i].trackType.equals(VIDEO)) {
		    int numKeyFrames = trakList[i].numKeyFrames;
		    if (numKeyFrames > 0)
			keyFrameTrack = i;
		    int maxChunkIndex = trakList[i].maxChunkIndex;
		    if ( (numKeyFrames > 0) && (numKeyFrames < maxChunkIndex) ) {
			trakList[i].indexToKeyframeIndex =
			    buildIndexToKeyFrameIndexTable(trakList[i].keyFrames,
							   numKeyFrames,
							   maxChunkIndex);
		    }
		    trakList[i].keyFrames = null;
		}
	    }

	    if (idx1MinimumChunkOffset >=  moviOffset) {
		// idx1 chunk offsets refer to start of the file.
		moviOffset = 0;
	    }
	    moviOffset += 8; // for chunk id and size
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing IDX1");
	}
	idx1ChunkSeen = true;
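For reference, each index record consumed above is 16 bytes (SIZE_OF_AVI_INDEX: one four-character id plus three ints) and follows the standard AVIINDEXENTRY layout; a sketch of the fields as the loop reads them, with an illustrative class name:

	// One 16-byte idx1 record, all fields little-endian.
	class Idx1Entry {
	    String ckid;          // e.g. "00dc" (stream 00, video) or "01wb" (stream 01, audio)
	    int    dwFlags;       // bit mask; the AVIF_KEYFRAME bit marks key frames
	    int    dwChunkOffset; // usually relative to the 'movi' list, sometimes to the
				  // file start, which is why moviOffset is zeroed above
	    int    dwChunkLength; // chunk payload length, excluding the 8-byte chunk header
	}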
    
private void parseMOVI(int length)

	try {
	    moviChunkSeen = true;
	    if ( (flags & AVIF_HASINDEX) > 0) {
		// Subtract 4 to include MOVI string
		moviOffset = (int) getLocation(stream) - 4;
		skip(stream, length);
	    } else {
		// System.out.println("parseMOVI: NO AVIF_HASINDEX"); // REMOVE
	    }
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing movi");
	}
    
private void parseSTRF(int length, int currentTrack)

  	try {
	    String trackType = trakList[currentTrack].trackType;
	    if (trackType.equals(VIDEO)) {
		Video video = new Video();
		video.size = readInt(stream, /* bigEndian = */ false);
		video.width = readInt(stream, /* bigEndian = */ false);
		video.height = readInt(stream, /* bigEndian = */ false);
		video.planes = readShort(stream, /* bigEndian = */ false);
		video.depth = readShort(stream, /* bigEndian = */ false);
		// Instead of readString, read the four bytes to see
		// if its a raw format.
		byte [] intArray = new byte[4];
		readBytes(stream, intArray, 4);
		if (intArray[0] > 32) {
		    video.compressor = new String(intArray);
		} else {
		    switch (intArray[0]) {
		    case 0:
			video.compressor = VideoFormat.RGB;
			break;
		    case 1:
			video.compressor = "rle8";
			break;
		    case 2:
			video.compressor = "rle4";
			break;
		    case 3:
			video.compressor = VideoFormat.RGB;
			break;
		    }
		}
		
		// Get the BITMAPINFO data needed by the decompressor
		BitMapInfo bmi = new BitMapInfo();
		bmi.biWidth = video.width;
		bmi.biHeight = video.height;
		bmi.biPlanes = video.planes;
		bmi.biBitCount = video.depth;
		bmi.fourcc = new String(video.compressor);
		video.bitMapInfo = bmi;
		bmi.biSizeImage = readInt(stream, false);
		bmi.biXPelsPerMeter = readInt(stream, false);
		bmi.biYPelsPerMeter = readInt(stream, false);
		bmi.biClrUsed = readInt(stream, false);
		bmi.biClrImportant = readInt(stream, false);
		

		if ( (length - STRF_VIDEO_HEADER_LENGTH) > 0) {
		    bmi.extraSize = (length - STRF_VIDEO_HEADER_LENGTH);
		    bmi.extraBytes = new byte[bmi.extraSize];
		    readBytes(stream, bmi.extraBytes, bmi.extraSize);
		}
		
		trakList[currentTrack].media = video;
		trakList[currentTrack].media.maxSampleSize =
		    trakList[currentTrack].suggestedBufferSize;
		videoTrack = currentTrack;
	    } else if (trackType.equals(AUDIO)) {
		Audio audio = new Audio();

                audio.formatTag = readShort(stream, /* bigEndian = */ false);
                audio.channels = readShort(stream, /* bigEndian = */ false);
                audio.sampleRate = readInt(stream, /* bigEndian = */ false);
                audio.avgBytesPerSec = readInt(stream, /* bigEndian = */ false);
                audio.blockAlign = readShort(stream, /* bigEndian = */ false);
                audio.bitsPerSample = readShort(stream, /* bigEndian = */ false);

		int remainingFormatSize = length - STRF_AUDIO_HEADER_LENGTH;

		codecSpecificHeader = null;
		int extraFieldsSize = 0;
		if (remainingFormatSize >= 2) {
		    extraFieldsSize = readShort(stream, /* bigEndian = */ false);
		    remainingFormatSize -= 2;

		    if (extraFieldsSize > 0) {
			codecSpecificHeader = new byte[extraFieldsSize];
			readBytes(stream, codecSpecificHeader, codecSpecificHeader.length);
			remainingFormatSize -= extraFieldsSize;
		    }


		    // TODO: do other encodings provide samplesPerBlock?
		    // Note that this info is there in codecSpecificHeader
		    if ( (audio.formatTag == WavAudioFormat.WAVE_FORMAT_ADPCM) ||
			 (audio.formatTag == WavAudioFormat.WAVE_FORMAT_DVI_ADPCM) ||
			 (audio.formatTag == WavAudioFormat.WAVE_FORMAT_GSM610) ) {

			if (extraFieldsSize < 2) {
			    throw new
				BadHeaderException("samplesPerBlock field not available for encoding " + audio.formatTag);
							 
			}
			audio.samplesPerBlock = BasicPullParser.parseShortFromArray(codecSpecificHeader,
								/* bigEndian = */ false);
		    }
		}
		    
		if (remainingFormatSize < 0) {
		    throw new BadHeaderException("Avi Parser: incorrect headersize in the STRF");
		}

 		if ( remainingFormatSize > 0)
 		    skip(stream, length - STRF_AUDIO_HEADER_LENGTH);

		trakList[currentTrack].media = audio;
		audioTrack = currentTrack;
	    } else {
		throw new BadHeaderException("strf: unsupported stream type " + trackType);
	    }

 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
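The two branches above mirror the standard Windows stream-format structures; a rough map of what the reads correspond to (field names follow BITMAPINFOHEADER and WAVEFORMATEX):

	// Video 'strf' = BITMAPINFOHEADER:
	//   biSize, biWidth, biHeight               -> video.size, video.width, video.height
	//   biPlanes, biBitCount                    -> video.planes, video.depth
	//   biCompression (fourcc, or 0..3 for raw) -> video.compressor (VideoFormat.RGB, "rle8", "rle4")
	//   biSizeImage, biXPelsPerMeter, biYPelsPerMeter, biClrUsed, biClrImportant -> bmi.*
	//   any trailing palette/codec bytes        -> bmi.extraBytes
	//
	// Audio 'strf' = WAVEFORMATEX:
	//   wFormatTag, nChannels, nSamplesPerSec, nAvgBytesPerSec, nBlockAlign, wBitsPerSample,
	//   then an optional cbSize and cbSize codec-specific bytes, kept in codecSpecificHeader;
	//   for the ADPCM and GSM610 formats the first short of that block is samplesPerBlock.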
    
private void parseSTRH(int length, int currentTrack)

  	try {
	    if (length < STRH_HEADER_LENGTH) {
		throw new BadHeaderException("strh: header length should be at least " +
					     STRH_HEADER_LENGTH + " but is " +
					     length);
	    }

	    trakList[currentTrack] = new TrakList();
	    trakList[currentTrack].trackType = readString(stream);
	    trakList[currentTrack].streamHandler = readString(stream);
	    trakList[currentTrack].flags = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].priority = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].initialFrames = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].scale = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].rate = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].start = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].length = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].suggestedBufferSize = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].quality = readInt(stream, /* bigEndian = */ false);
	    trakList[currentTrack].sampleSize = readInt(stream, /* bigEndian = */ false);

	    skip(stream, 8); // Padding
	    if ( (length - STRH_HEADER_LENGTH) > 0)
		skip(stream, length - STRH_HEADER_LENGTH);

 	} catch (IOException e) {
 	    throw new BadHeaderException("IOException when parsing hdrl");
 	}
    
private void parseSTRL(int length, int currentTrack)

 	try {
	    if (currentTrack >= trakList.length ) {
		throw new BadHeaderException("inconsistent number of strl atoms");
	    }

	    length -= 12; // for "LIST <length> strl"
	    while (length >= 12) { // TODO: check
		String subchunkid = readString(stream);
		int subchunkLength = readInt(stream, /* bigEndian = */ false);
		if (subchunkid.equals("strh")) {
		    parseSTRH(subchunkLength, currentTrack);
		} else if (subchunkid.equals("strf")) {
		    if (trakList[currentTrack] == null) {
			throw new BadHeaderException("strf doesn't have a strh atom preceding it");
		    }
		    parseSTRF(subchunkLength, currentTrack);
		} else {
		    // System.err.println("Unsupported subchunk " + subchunkid +
		    //	       " in strl. length " + subchunkLength);
		    if ( (subchunkLength & 1) > 0) {
			// Some avi files, like billy.avi, have strn chunks
			// with an incorrect odd number for the length.
			// The actual offset is 1 more. If this correction
			// is not made, all the remaining chunks will be
			// read incorrectly.
			subchunkLength++;
		    }
		    skip(stream, subchunkLength);
		}
		length -= (subchunkLength + 4); // 4 is for subchunkid
	    }
	} catch (IOException e) {
	    throw new BadHeaderException("IOException when parsing hdrl");
	}
    
private void readHeader()

	
	String magicRIFF = readString(stream);
	if (!(magicRIFF.equals("RIFF"))) {
	    throw new BadHeaderException("AVI Parser: expected string RIFF, got "
					 + magicRIFF);
	}

	length = readInt(stream, /* bigEndian = */ false);
	length += 8; // For RIFF and AVI

	String magicAVI = readString(stream);
	if (!(magicAVI.equals("AVI "))) {
	    throw new BadHeaderException("AVI Parser: expected string AVI, got "
					 + magicAVI);
	}

	int currentTrack = 0;
	while (getLocation(stream) <= (length-12)) {
	    String next = readString(stream);
	    int subchunkLength = readInt(stream, /* bigEndian = */ false);
	    if (next.equals("LIST")) {
		String subchunk = readString(stream);
		if (subchunk.equals("hdrl")) {
		    parseHDRL();
		} else if (subchunk.equals("strl")) {
		    parseSTRL(subchunkLength, currentTrack);
		    currentTrack++;
		} else if (subchunk.equals("movi"))
		    parseMOVI(subchunkLength - 4);
		else {
		    // System.err.println("Unsupported subchunk " + subchunk +
		    //  " in LIST");
		    skip(stream, subchunkLength-4);
		}
	    } else if (next.equals("idx1")) {
		parseIDX1(subchunkLength);
	    } else {
		skip(stream, subchunkLength);
		if ( (subchunkLength & 1) > 0)
		    skip(stream, 1);
	    }
	}
	if ( (totalFrames != 0) && (usecPerFrame != 0) ) {
	    duration = new Time((long) usecPerFrame * totalFrames * 1000);
	}
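For orientation, the loop above walks the standard RIFF/AVI layout (all sizes little-endian). The nesting shown is the usual on-disk structure; the parser sees the strl lists from the same top-level loop simply because parseHDRL() returns right after the avih chunk without skipping the rest of the hdrl list:

	// RIFF <fileSize> "AVI "
	//   LIST <size> "hdrl"
	//     avih <size> <main header>             -> parseHDRL() / parseAVIH()
	//     LIST <size> "strl"   (one per stream) -> parseSTRL()
	//       strh <size> <stream header>         -> parseSTRH()
	//       strf <size> <stream format>         -> parseSTRF()
	//   LIST <size> "movi"                      -> parseMOVI(): the "##dc"/"##wb" data chunks
	//   idx1 <size> <index entries>             -> parseIDX1()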
    
public javax.media.Time setPosition(javax.media.Time where, int rounding)

	int keyframeNum = -1;
	if ( (keyFrameTrack != -1) && (tracks[keyFrameTrack].isEnabled()) ) {
	    // keyframe track present and is enabled

	    TrakList trakInfo = trakList[keyFrameTrack];
	    Track track = tracks[keyFrameTrack];
	    int frameNum = track.mapTimeToFrame(where);
	    keyframeNum = frameNum;
	    // TODO: handle FRAME_UNKNOWN

	    if (trakInfo.indexToKeyframeIndex.length > frameNum) {
		keyframeNum = trakInfo.indexToKeyframeIndex[frameNum];
	    }

	    if (keyframeNum != frameNum) {
		where = track.mapFrameToTime(keyframeNum);
	    }
	}
	for (int i = 0; i < numTracks; i++) {
	    if (!tracks[i].isEnabled())
		continue;

	    int chunkNumber =0;
	    int offsetWithinChunk = 0;
	    try {
		if (i == keyFrameTrack) {
		    chunkNumber = keyframeNum;
		    continue;
		}

		TrakList trakInfo = trakList[i];
		if (trakInfo.trackType.equals("vids")) {
		    if (usecPerFrame != 0) {
			chunkNumber = (int) (where.getNanoseconds() / nanoSecPerFrame);
			if (chunkNumber < 0)
			    chunkNumber = 0;
			else if (chunkNumber >= trakInfo.maxChunkIndex) {
			    continue; // EOM
			}
		    }
		} else if (trakInfo.trackType.equals("auds")) {
		    int bytePos = (int) ( where.getSeconds() *
					  ((Audio) trakInfo.media).avgBytesPerSec);
		    if (bytePos < 0)
			bytePos = 0;

		    // Note: the else statement can also handle the if
		    // case, ie maxChunkIndex == 1, but is separated here
		    // for clarity and a slight efficiency.
		    if (trakInfo.maxChunkIndex == 1) {
			if (bytePos >= trakInfo.chunkInfo[0].chunkLength) {
			    chunkNumber = trakInfo.maxChunkIndex; // EOM
			    continue; // EOM
			}
			chunkNumber = 0;
			offsetWithinChunk = bytePos;
		    } else {
			int approx;
			chunkNumber = trakInfo.getChunkNumber(bytePos);
			if (chunkNumber >= trakInfo.maxChunkIndex)
			    continue; // EOM
			
			approx = trakInfo.chunkInfo[chunkNumber].cumulativeChunkLength -
			    trakInfo.chunkInfo[chunkNumber].chunkLength;
			offsetWithinChunk = bytePos - approx;
		    }

		    if ( (offsetWithinChunk & 1) > 0)
			offsetWithinChunk--;
		    
		    int blockAlign = ((Audio) trakInfo.media).blockAlign;
		    if (blockAlign != 0) {
			offsetWithinChunk -= (offsetWithinChunk % blockAlign);
		    }
		}
	    } finally {
		((MediaTrack)tracks[i]).setChunkNumberAndOffset(chunkNumber,
								offsetWithinChunk);
	    }
	}
	return where;
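A small worked example of the arithmetic above, with assumed numbers: usecPerFrame = 33333 (about 30 fps) and an audio track with avgBytesPerSec = 176400 and blockAlign = 4, seeking to where = 2.0 seconds:

	// video: chunkNumber = 2,000,000,000 ns / 33,333,000 ns per frame = 60;
	//        if frame 60 is not a key frame it is snapped back via
	//        indexToKeyframeIndex[60] (say, to frame 45) and 'where' is
	//        remapped to that key frame's time.
	// audio: bytePos = 2.0 * 176400 = 352800; getChunkNumber() locates the chunk
	//        whose cumulativeChunkLength covers that byte, offsetWithinChunk is
	//        the remainder, rounded down to an even value and to a multiple of
	//        blockAlign (352800 already is).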
    
public void setSource(javax.media.protocol.DataSource source)


	super.setSource(source);
	stream = (PullSourceStream) streams[0];
	seekableStream = (Seekable) streams[0];
    
protected boolean supports(javax.media.protocol.SourceStream[] streams)
The AVI format requires that the stream be seekable and randomly accessible.

	boolean seekable = (streams[0] instanceof Seekable) &&
	    ((Seekable) streams[0]).isRandomAccess();
	return seekable;
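Putting it together, a minimal usage sketch (not part of this class) that drives the parser directly through the javax.media.Demultiplexer interface it implements via BasicPullParser. The file path is hypothetical, and in a real player JMF would normally instantiate the parser itself through the PlugInManager:

	import javax.media.*;
	import javax.media.protocol.DataSource;
	import com.sun.media.parser.video.AviParser;

	public class AviParserDemo {
	    public static void main(String[] args) throws Exception {
		// Hypothetical file; the source must expose a seekable PullSourceStream.
		DataSource source =
		    Manager.createDataSource(new MediaLocator("file:/tmp/sample.avi"));
		source.connect();

		AviParser parser = new AviParser();
		parser.setSource(source);   // rejects non-seekable sources, see supports()
		parser.open();
		parser.start();

		Track[] tracks = parser.getTracks();
		System.out.println("duration = " + parser.getDuration().getSeconds() + " sec");
		for (int i = 0; i < tracks.length; i++)
		    System.out.println("track " + i + ": " + tracks[i].getFormat());

		Buffer buf = new Buffer();
		tracks[0].readFrame(buf);   // pull the first chunk of the first track
		System.out.println("first chunk: " + buf.getLength() + " bytes");

		parser.stop();
		parser.close();
		source.disconnect();
	    }
	}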