File: TRTrackerScraperResponseImpl.java
Doc: API Doc
Category: Azureus 3.0.3.4
Size: 4859 bytes
Date: Sun Jul 16 03:35:20 BST 2006
Package: org.gudy.azureus2.core3.tracker.client.impl

TRTrackerScraperResponseImpl

public abstract class TRTrackerScraperResponseImpl extends Object implements TRTrackerScraperResponse
Authors: Olivier, TuxPaper

Fields Summary
private org.gudy.azureus2.core3.util.HashWrapper hash
private int seeds
private int peers
private long scrapeStartTime
private long nextScrapeStartTime
private String sStatus
private String sLastStatus
private int status
private int last_status
Constructors Summary
protected TRTrackerScraperResponseImpl(org.gudy.azureus2.core3.util.HashWrapper _hash)

  {
    // Delegates with -1 sentinels: no scrape data has been received yet
    this(_hash, -1, -1, -1);
  }

protected TRTrackerScraperResponseImpl(org.gudy.azureus2.core3.util.HashWrapper _hash, int _seeds, int _peers, long _scrapeStartTime)

  {
    hash = _hash;
    seeds = _seeds;
    peers = _peers;

    scrapeStartTime = _scrapeStartTime;

    // A response only starts out ONLINE if it already carries valid scrape data
    status = (!isValid()) ? TRTrackerScraperResponse.ST_INITIALIZING : TRTrackerScraperResponse.ST_ONLINE;
    nextScrapeStartTime = -1;
  }
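The -1 sentinels drive the initial state: until a scrape supplies real counts, isValid() is false and the response starts as ST_INITIALIZING rather than ST_ONLINE. A minimal standalone sketch of that decision (not part of the original source; the constant values here are placeholders, the real ones are defined on TRTrackerScraperResponse):

  public class InitialStatusSketch {
      // Placeholder values; the real constants live in TRTrackerScraperResponse
      static final int ST_INITIALIZING = 1;
      static final int ST_ONLINE = 2;

      // Same test the class performs in isValid()
      static boolean isValid(int seeds, int peers) {
          return !(seeds == -1 && peers == -1);
      }

      public static void main(String[] args) {
          // Single-argument constructor path: seeds = peers = -1
          System.out.println(isValid(-1, -1) ? ST_ONLINE : ST_INITIALIZING); // 1 (initializing)
          // Full constructor with real scrape data
          System.out.println(isValid(12, 40) ? ST_ONLINE : ST_INITIALIZING); // 2 (online)
      }
  }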
  
Methods Summary
public static int calcScrapeIntervalSecs(int iRecIntervalSecs, int iNumSeeds)
Calculate the scrape interval, applying internal min/max limits and default calculations.

param:  iRecIntervalSecs - recommended interval in seconds, or 0 for no recommendation
param:  iNumSeeds - number of seeds the torrent has, used to calculate the scrape interval
return: calculated interval in seconds

	{
		final int MIN = 15 * 60;
		final int MAX = 3 * 60 * 60;

		// Min 15 min, plus 10 seconds for every seed
		// ex. 10 seeds   = 15m + 100s   = ~16.66m
		//     60 seeds   = 15m + 600s   = 25m
		//     1000 seeds = 15m + 10000s = ~3h 2m, clamped to the 3h MAX
		int scrapeInterval = MIN + (iNumSeeds * 10);
		if (iRecIntervalSecs > scrapeInterval)
			scrapeInterval = iRecIntervalSecs;

		if (scrapeInterval > MAX)
			scrapeInterval = MAX;

		return scrapeInterval;
	}
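Since calcScrapeIntervalSecs is public and static, it can be called directly. A usage sketch reproducing the worked examples from the comments above (the 7200 figure is an arbitrary recommended interval chosen for illustration):

  // Usage sketch for the static helper above
  int a = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(0, 10);    // 1000s  (~16.66m)
  int b = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(0, 60);    // 1500s  (25m)
  int c = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(0, 1000);  // 10800s (clamped to the 3h MAX)
  int d = TRTrackerScraperResponseImpl.calcScrapeIntervalSecs(7200, 10); // 7200s  (the recommendation wins when larger)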
	
public org.gudy.azureus2.core3.util.HashWrapper getHash()
  {
    return hash;
  }

public long getNextScrapeStartTime()
  {
    return nextScrapeStartTime;
  }

public int getPeers()
  {
    return peers;
  }

public long getScrapeStartTime()
  {
    return scrapeStartTime;
  }

public int getSeeds()
  {
    return seeds;
  }

public int getStatus()
  {
    return status;
  }

public java.lang.String getStatusString()
  {
    return sStatus;
  }
public java.lang.String getString()
  {
	  // Debug rendering, e.g. "<announce-url>: <hex-hash>,seeds=12,peers=40,state=2/Online,last=2/Online,start=...,next=..."
	  return( getURL() + ": " + ByteFormatter.encodeString(hash.getBytes()) + ",seeds=" + seeds + ",peers=" + peers + ",state=" + status +
			  "/" + sStatus + ",last=" + last_status + "/" + sLastStatus + ",start=" + scrapeStartTime + ",next=" + nextScrapeStartTime);
  }
public boolean isValid()
  {
    // seeds == -1 && peers == -1 is the sentinel set by the single-argument
    // constructor, meaning no scrape data has arrived yet
    return !(seeds == -1 && peers == -1);
  }
public void revertStatus()
  {
    // Restores the status saved by setStatus(int, String) below
    status = last_status;
    sStatus = sLastStatus;
  }
public void setNextScrapeStartTime(long _nextScrapeStartTime)
  {
    nextScrapeStartTime = _nextScrapeStartTime;
  }

public void setPeers(int p)
  {
  	peers = p;
  }

public void setScrapeStartTime(long time)
  {
    scrapeStartTime = time;
  }

public void setSeeds(int s)
  {
  	seeds = s;
  }

protected void setStatus(java.lang.String str)
  {
	  sStatus = str;
  }
public void setStatus(int iNewStatus, java.lang.String sNewStatus)
  {
    // Remember the outgoing status so revertStatus() can step back one change,
    // but only when the status is actually changing and the current value
    // hasn't already been saved
    if (last_status != status && iNewStatus != status)
      last_status = status;

    // An ONLINE report only sticks once valid scrape data exists; until then
    // the response remains INITIALIZING
    if (iNewStatus == TRTrackerScraperResponse.ST_ONLINE) {
      status = (!isValid()) ? TRTrackerScraperResponse.ST_INITIALIZING : TRTrackerScraperResponse.ST_ONLINE;
    } else {
      status = iNewStatus;
    }

    if (sNewStatus == null)
      return;

    // Null-safe: sLastStatus has no initializer above, so guard the first update
    if (sLastStatus == null || !sLastStatus.equals(sStatus)) {
      sLastStatus = sStatus;
    }
    sStatus = sNewStatus;
  }
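The bookkeeping above pairs with revertStatus(). A minimal standalone sketch of the save-and-revert behaviour (not the original class, which is abstract; the int values stand in for the real status constants):

  class StatusRevertSketch {
      int status = 2;       // pretend ST_ONLINE
      int last_status = 2;

      void setStatus(int iNewStatus) {
          // Save the current status only when it is actually changing and
          // differs from the already-saved value, mirroring the method above
          if (last_status != status && iNewStatus != status)
              last_status = status;
          status = iNewStatus;
      }

      void revertStatus() {
          status = last_status; // step back to the saved status
      }

      public static void main(String[] args) {
          StatusRevertSketch s = new StatusRevertSketch();
          s.setStatus(3);               // status=3, last_status still 2
          s.setStatus(4);               // saves 3, then status=4
          s.revertStatus();             // steps back one change
          System.out.println(s.status); // 3
      }
  }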
  
public void setStatus(int s)
  {
  	status = s;
  }