Exception while retrieving the body
Hi,
I am using Soot 2.1.0 and get the following exception when retrieving
the body of the attached class. The attached source was compiled with
jikes.
Warning: target of a branch is null
84: tableswitch[84]
tableswitch($stack0) { case 301: goto goto [?= $stack0 = l0];
                       case 302: goto null;
                       default: goto $stack0 = new java.io.BufferedInputStream; }
successor: null
java.lang.NullPointerException
at soot.toolkits.graph.UnitGraph.<init>(UnitGraph.java:255)
at soot.toolkits.graph.UnitGraph.<init>(UnitGraph.java:72)
at soot.toolkits.graph.CompleteUnitGraph.<init>(CompleteUnitGraph.java:54)
at soot.toolkits.scalar.LocalSplitter.internalTransform(LocalSplitter.java:78)
at soot.BodyTransformer.transform(BodyTransformer.java:51)
at soot.Transform.apply(Transform.java:88)
at soot.JimpleBodyPack.applyPhaseOptions(JimpleBodyPack.java:59)
at soot.JimpleBodyPack.internalApply(JimpleBodyPack.java:91)
at soot.Pack.apply(Pack.java:113)
at soot.coffi.CoffiMethodSource.getBody(CoffiMethodSource.java:115)
at soot.SootMethod.getBodyFromMethodSource(SootMethod.java:79)
at soot.SootMethod.retrieveActiveBody(SootMethod.java:274)
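For reference, I retrieve the body roughly along these lines (this is a
sketch, not my exact driver; it assumes the attached class and its
dependencies are on Soot's classpath):

import java.util.Iterator;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;

public class RetrieveBodyDriver {
    public static void main(String[] args) {
        // Load the attached class and everything it references.
        SootClass sc = Scene.v().loadClassAndSupport(
            "net.javacoding.jspider.core.task.work.FetchRobotsTXTTaskImpl");
        for (Iterator it = sc.getMethods().iterator(); it.hasNext();) {
            SootMethod m = (SootMethod) it.next();
            // The NullPointerException above is thrown here for execute().
            m.retrieveActiveBody();
        }
    }
}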
Any ideas what might be wrong? The cases 301 and 302 in the offending
tableswitch correspond to the switch on the HTTP response code
(HTTP_MOVED_PERM / HTTP_MOVED_TEMP) in execute() below. If a fix since
v2.1.0 addresses this issue, could you please point me to it?
Waiting for a reply,
--
Venkatesh Prasad Ranganath,
Dept. Computing and Information Science,
Kansas State University, US.
web: http://www.cis.ksu.edu/~rvprasad
package net.javacoding.jspider.core.task.work;

import net.javacoding.jspider.api.model.HTTPHeader;
import net.javacoding.jspider.api.model.Site;
import net.javacoding.jspider.core.SpiderContext;
import net.javacoding.jspider.core.logging.LogFactory;
import net.javacoding.jspider.core.event.CoreEvent;
import net.javacoding.jspider.core.event.impl.*;
import net.javacoding.jspider.core.task.WorkerTask;
import net.javacoding.jspider.core.util.http.HTTPHeaderUtil;

import java.io.*;
import java.net.*;

/**
 * $Id: FetchRobotsTXTTaskImpl.java,v 1.1 2004/04/09 15:53:36 tcw Exp $
 */
public class FetchRobotsTXTTaskImpl extends BaseWorkerTaskImpl {

    protected URL url;
    protected Site site;

    public FetchRobotsTXTTaskImpl(SpiderContext context, URL url, Site site) {
        super(context, WorkerTask.WORKERTASK_SPIDERTASK);
        this.url = url;
        this.site = site;
    }

    public void prepare() {
        context.throttle(site);
    }
    public void execute() {
        CoreEvent event = null;
        URLConnection connection = null;
        InputStream inputStream = null;
        int httpStatus = 0;
        HTTPHeader[] headers = null;
        try {
            connection = url.openConnection();
            // RFC states that redirects should be followed.
            // see: http://www.robotstxt.org/wc/norobots-rfc.txt
            ((HttpURLConnection) connection).setInstanceFollowRedirects(true);
            connection.setRequestProperty("User-agent", site.getUserAgent());
            context.preHandle(connection, site);
            long start = System.currentTimeMillis();
            connection.connect();
            if (connection instanceof HttpURLConnection) {
                httpStatus = ((HttpURLConnection) connection).getResponseCode();
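                // Note: this switch is the tableswitch[84] flagged in the
                // warning above (301 = HTTP_MOVED_PERM, 302 = HTTP_MOVED_TEMP).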
                switch (httpStatus) {
                    case HttpURLConnection.HTTP_MOVED_PERM:
                    case HttpURLConnection.HTTP_MOVED_TEMP:
                        return;
                    default:
                        break;
                }
            }
            inputStream = new BufferedInputStream(connection.getInputStream());
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            InputStream is = new BufferedInputStream(inputStream);
            try {
                int i = is.read();
                while (i != -1) {
                    os.write(i);
                    i = is.read();
                }
            } catch (IOException e) {
                LogFactory.getLog(FetchRobotsTXTTaskImpl.class).error("i/o exception during fetch robots.txt", e);
            }
            String contentType = connection.getContentType();
            int size = connection.getContentLength();
            int timeMs = (int) (System.currentTimeMillis() - start);
            headers = HTTPHeaderUtil.getHeaders(connection);
            if (httpStatus >= 200 && httpStatus < 303) {
                event = new RobotsTXTSpideredOkEvent(url, context, url, httpStatus, connection, contentType, timeMs, size, os.toByteArray(), headers);
            } else if (httpStatus >= 400 && httpStatus < 500) {
                event = new RobotsTXTUnexistingEvent(url, context, url, httpStatus, connection, headers, null);
            } else {
                event = new RobotsTXTSpideredErrorEvent(url, context, url, httpStatus, connection, headers, null);
            }
        } catch (FileNotFoundException e) {
            headers = HTTPHeaderUtil.getHeaders(connection);
            event = new RobotsTXTUnexistingEvent(url, context, url, 404, connection, headers, e);
        } catch (Exception e) {
            event = new RobotsTXTSpideredErrorEvent(url, context, url, httpStatus, connection, headers, e);
        } finally {
            notifyEvent(url, event);
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    LogFactory.getLog(FetchRobotsTXTTaskImpl.class).error("i/o exception closing inputstream", e);
                }
            }
        }
    }
}
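In case it is useful as a workaround until a fix is available: rewriting
the switch in execute() as an if chain should sidestep the problem,
since an if chain compiles to plain conditional branches rather than a
tableswitch. A sketch, equivalent to the switch above:

                // Same control flow as the switch, but no tableswitch
                // is emitted for this form.
                if (httpStatus == HttpURLConnection.HTTP_MOVED_PERM
                        || httpStatus == HttpURLConnection.HTTP_MOVED_TEMP) {
                    return;
                }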