From: <bra...@us...> - 2010-05-18 23:01:20
Revision: 3108
          http://archive-access.svn.sourceforge.net/archive-access/?rev=3108&view=rev
Author:   bradtofel
Date:     2010-05-18 23:01:13 +0000 (Tue, 18 May 2010)

Log Message:
-----------
TWEAK: removed calls to wbrequest setContextPrefix and setServerPrefix

Modified Paths:
--------------
    trunk/archive-access/projects/wayback/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java

Modified: trunk/archive-access/projects/wayback/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java
===================================================================
--- trunk/archive-access/projects/wayback/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java	2010-05-18 23:00:00 UTC (rev 3107)
+++ trunk/archive-access/projects/wayback/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java	2010-05-18 23:01:13 UTC (rev 3108)
@@ -70,9 +70,6 @@
 		WaybackRequest wbRequest = new WaybackRequest();
 		wbRequest.setAccessPoint(inner);
-		wbRequest.setContextPrefix(inner.getUrlRoot());
-		wbRequest.setServerPrefix(inner.getUrlRoot());
-
 		wbRequest.setLiveWebRequest(true);
 		wbRequest.setRequestUrl(urlString);
 		URL url = null;
@@ -86,14 +83,14 @@
 		CaptureSearchResult result = new CaptureSearchResult();
 		result.setOriginalUrl(urlString);
 		result.setUrlKey(urlString);
-		// should we check robots, first?
+		// check robots first, if configured
 		if(robotFactory != null) {
 			int ruling = robotFactory.get().filterObject(result);
 			if(ruling == ExclusionFilter.FILTER_EXCLUDE) {
 				throw new RobotAccessControlException(urlString + "is blocked by robots.txt");
 			}
 		}
-		// robots says GO:
+		// no robots check, or robots.txt says GO:
 		ArcResource r = (ArcResource) cache.getCachedResource(url, maxCacheMS , false);
 		ARCRecord ar = (ARCRecord) r.getArcRecord();
 		int status = ar.getStatusCode();
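
For context on the second hunk: the robots check only runs when a robotFactory is configured, and an excluded URL is refused before anything is fetched from the live web. Below is a minimal, self-contained sketch of that pattern. The types here (ExclusionFilter, RobotAccessControlException, checkRobots) are simplified stand-ins written for illustration, not the real org.archive.wayback classes or signatures; only the control flow mirrors the committed code.

    // Sketch of the "check robots first, if configured" pattern from the hunk above.
    public class RobotsCheckSketch {

        // Stand-in for an exclusion filter: returns a ruling for a URL.
        interface ExclusionFilter {
            int FILTER_INCLUDE = 0;
            int FILTER_EXCLUDE = 1;
            int filterObject(String urlKey);
        }

        // Stand-in for the exception thrown when robots.txt blocks access.
        static class RobotAccessControlException extends Exception {
            RobotAccessControlException(String message) { super(message); }
        }

        // Mirrors the committed logic: skip the check entirely when no filter
        // is configured, otherwise refuse excluded URLs before fetching.
        static void checkRobots(ExclusionFilter filter, String urlString)
                throws RobotAccessControlException {
            if (filter != null) {
                int ruling = filter.filterObject(urlString);
                if (ruling == ExclusionFilter.FILTER_EXCLUDE) {
                    throw new RobotAccessControlException(
                            urlString + " is blocked by robots.txt");
                }
            }
            // no robots check, or robots.txt says GO: caller proceeds to fetch
        }

        public static void main(String[] args) throws Exception {
            // Toy filter that blocks a single URL, to exercise both branches.
            ExclusionFilter filter = url ->
                    url.equals("http://example.com/private")
                            ? ExclusionFilter.FILTER_EXCLUDE
                            : ExclusionFilter.FILTER_INCLUDE;

            checkRobots(filter, "http://example.com/ok");      // passes
            checkRobots(null, "http://example.com/anything");  // no filter configured, passes
            try {
                checkRobots(filter, "http://example.com/private");
            } catch (RobotAccessControlException e) {
                System.out.println("blocked: " + e.getMessage());
            }
        }
    }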