|
From: <tr...@us...> - 2003-07-26 15:17:28
|
Update of /cvsroot/babeldoc/babeldoc/modules/core/src/com/babeldoc/core/pipeline/stage
In directory sc8-pr-cvs1:/tmp/cvs-serv10309
Modified Files:
EntityResolverDiskCache.java DomifyPipelineStage.java
Log Message:
Index: EntityResolverDiskCache.java
===================================================================
RCS file: /cvsroot/babeldoc/babeldoc/modules/core/src/com/babeldoc/core/pipeline/stage/EntityResolverDiskCache.java,v
retrieving revision 1.2
retrieving revision 1.3
diff -C2 -d -r1.2 -r1.3
*** EntityResolverDiskCache.java 24 Jul 2003 23:51:20 -0000 1.2
--- EntityResolverDiskCache.java 25 Jul 2003 13:21:26 -0000 1.3
***************
*** 86,173 ****
**/
public class EntityResolverDiskCache
! implements EntityResolver
! {
! /**
! * Construct a new EntityResolverDiskCache that backends to the given EntityResolver and
! * caches its entities in the given directory.
! **/
! public EntityResolverDiskCache(EntityResolver entityResolver, String baseDirectory)
! throws Exception
! {
! this.entityResolver = entityResolver;
! this.baseDirectory = new File(baseDirectory);
! if (!this.baseDirectory.isDirectory()) {
! throw new Exception("Not a directory.");
! }
! }
!
! /**
! * resolve an entity
! **/
! public InputSource resolveEntity (String publicId, String systemId)
! throws SAXException, IOException
! {
! try {
! File file = null;
! // synchronize on the systemId so two attempts to resolve don't result in two gets from the backend
! synchronized (systemId.intern()) {
! file = new File(baseDirectory, Integer.toString(systemId.hashCode()));
! if (!file.exists()) {
! InputSource inputSource = entityResolver.resolveEntity(publicId, systemId);
! if (inputSource != null) {
! Reader reader = inputSource.getCharacterStream();
! if (reader != null) {
! FileWriter fw = new FileWriter(file);
! char[] buffer = new char[1024];
! int charsRead;
! while ((charsRead = reader.read(buffer)) != -1) {
! fw.write(buffer, 0, charsRead);
! }
! fw.close();
! reader.close();
! }
! else {
! InputStream byteStream = inputSource.getByteStream();
! if (byteStream != null) {
! FileOutputStream outputStream = new FileOutputStream(file);
! byte[] buffer = new byte[1024];
! int bytesRead;
! while ((bytesRead = byteStream.read(buffer)) != -1) {
! outputStream.write(buffer, 0, bytesRead);
! }
! outputStream.close();
! byteStream.close();
! }
! }
! }
! if (!file.exists()) {
! URL url = new URL(systemId);
! InputStream inputStream = url.openStream();
! FileOutputStream outputStream = new FileOutputStream(file);
! byte[] buffer = new byte[1024];
! int bytesRead;
! while ((bytesRead = inputStream.read(buffer)) != -1) {
! outputStream.write(buffer, 0, bytesRead);
! }
! outputStream.close();
! inputStream.close();
! }
}
! }
! InputSource is = new InputSource(new FileInputStream(file));
! is.setPublicId(publicId);
! is.setSystemId(systemId);
! return is;
! }
! catch (RuntimeException t) {
! t.printStackTrace();
! throw t;
}
! }
!
! /** the EntityResolver to backend to **/
! EntityResolver entityResolver;
!
! /** the directory to use as a cache **/
! File baseDirectory;
}
--- 86,168 ----
**/
public class EntityResolverDiskCache
! implements EntityResolver {
! /**
! * Construct a new EntityResolverDiskCache that backends to the given EntityResolver and
! * caches its entities in the given directory.
! **/
! public EntityResolverDiskCache(EntityResolver entityResolver, String baseDirectory)
! throws Exception {
! this.entityResolver = entityResolver;
! this.baseDirectory = new File(baseDirectory);
! if (!this.baseDirectory.isDirectory()) {
! throw new Exception("Not a directory.");
! }
! }
!
! /**
! * resolve an entity
! **/
! public InputSource resolveEntity(String publicId, String systemId)
! throws SAXException, IOException {
! try {
! File file = null;
! // synchronize on the systemId so two attempts to resolve don't result in two gets from the backend
! synchronized (systemId.intern()) {
! file = new File(baseDirectory, Integer.toString(systemId.hashCode()));
! if (!file.exists()) {
! InputSource inputSource = entityResolver.resolveEntity(publicId, systemId);
! if (inputSource != null) {
! Reader reader = inputSource.getCharacterStream();
! if (reader != null) {
! FileWriter fw = new FileWriter(file);
! char[] buffer = new char[1024];
! int charsRead;
! while ((charsRead = reader.read(buffer)) != -1) {
! fw.write(buffer, 0, charsRead);
! }
! fw.close();
! reader.close();
! } else {
! InputStream byteStream = inputSource.getByteStream();
! if (byteStream != null) {
! FileOutputStream outputStream = new FileOutputStream(file);
! byte[] buffer = new byte[1024];
! int bytesRead;
! while ((bytesRead = byteStream.read(buffer)) != -1) {
! outputStream.write(buffer, 0, bytesRead);
! }
! outputStream.close();
! byteStream.close();
! }
}
! }
! if (!file.exists()) {
! URL url = new URL(systemId);
! InputStream inputStream = url.openStream();
! FileOutputStream outputStream = new FileOutputStream(file);
! byte[] buffer = new byte[1024];
! int bytesRead;
! while ((bytesRead = inputStream.read(buffer)) != -1) {
! outputStream.write(buffer, 0, bytesRead);
! }
! outputStream.close();
! inputStream.close();
! }
! }
}
! InputSource is = new InputSource(new FileInputStream(file));
! is.setPublicId(publicId);
! is.setSystemId(systemId);
! return is;
! } catch (RuntimeException t) {
! t.printStackTrace();
! throw t;
! }
! }
!
! /** the EntityResolver to backend to **/
! EntityResolver entityResolver;
!
! /** the directory to use as a cache **/
! File baseDirectory;
}
Index: DomifyPipelineStage.java
===================================================================
RCS file: /cvsroot/babeldoc/babeldoc/modules/core/src/com/babeldoc/core/pipeline/stage/DomifyPipelineStage.java,v
retrieving revision 1.12
retrieving revision 1.13
diff -C2 -d -r1.12 -r1.13
*** DomifyPipelineStage.java 24 Jul 2003 14:06:13 -0000 1.12
--- DomifyPipelineStage.java 25 Jul 2003 13:21:26 -0000 1.13
***************
*** 190,204 ****
}
! // public IConfigInfo getPipelineInfo() {
! // return info;
! // }
!
public static Document getCachedDom(PipelineDocument document) {
! Document doc = (Document)domCache.get(document);
! return doc;
}
public static void putCachedDom(PipelineDocument document, Document doc) {
! domCache.put(document, doc);
}
--- 190,212 ----
}
! /**
! * Get the dom object cached on this pipelinedocument (stored on the
! * weak hashmap)
! *
! * @param document
! * @return
! */
public static Document getCachedDom(PipelineDocument document) {
! return (Document)domCache.get(document);
}
+ /**
+ * Store the document in the dom cache
+ *
+ * @param document
+ * @param doc
+ */
public static void putCachedDom(PipelineDocument document, Document doc) {
! domCache.put(document, doc);
}
***************
*** 233,238 ****
}
} catch (Exception e) {
! e.printStackTrace();
! com.babeldoc.core.LogService.getInstance().logError(e);
}
--- 241,245 ----
}
} catch (Exception e) {
! LogService.getInstance().logError(e);
}
***************
*** 431,435 ****
* @throws PipelineException DOCUMENT ME!
*/
! public PipelineStageResult[] process() throws PipelineException {
try {
String schemaFile = getOptions(SCHEMA_FILE);
--- 438,443 ----
* @throws PipelineException DOCUMENT ME!
*/
! public PipelineStageResult[] process()
! throws PipelineException {
try {
String schemaFile = getOptions(SCHEMA_FILE);
|
|
From: Stefan K. <ste...@co...> - 2003-07-28 20:56:08
|
Hello,
We are using the SQL journal in a multi-threaded J2EE environment and got
DB errors because of duplicated IDs in the LOG table.
We solved it by making getNextStep and the DB update a critical section. I
think this makes sense and should go into the babeldoc code.
Regards,
Stefan
/**
 * Write one journal entry for the given ticket to the LOG table.
 * Copied from GenericSQLJournal, with synchronization added so that
 * getNextStep() and the subsequent INSERT form a critical section;
 * without it, concurrent threads can obtain the same step value and the
 * INSERT then fails with a duplicate-key error.
 *
 * @param ticket    journal ticket identifying the run; dummy tickets are
 *                  ignored (no row written)
 * @param operation kind of journal event being recorded
 * @param other     free-form text stored in the OTHER column
 * @param stage     pipeline stage name; the literal string "null" is stored
 *                  when absent
 * @param data      operation payload: a String for updateStatus, a
 *                  PipelineDocument for updateDocument — presumably null for
 *                  other operations, TODO confirm
 * @throws JournalException declared, but never actually thrown by this body:
 *                  all failures are caught and only logged (best-effort)
 */
protected void log(IJournalTicket ticket, JournalOperation operation,
String other, String stage, Object data) throws
JournalException {
//getLog().debug("[GenericSqlJournal.log] called");
// Get the next step for the ticket
// Dummy tickets carry no journaled identity, so skip them entirely.
if (!ticket.isDummy()) {
PreparedStatement pstmt = null;
Connection con = null;
// LOG_INSERT is a named query; the setters below imply its parameter
// order is (id, step, timestamp, operation, other, stage, additional)
// — verify against the query definition.
String logInsertStmt = SqlQueryManager.getSqlQuery(LOG_INSERT);
//getLog().debug("[GenericSqlJournal.log] sql: "+logInsertStmt);
long id = ((JournalTicket)ticket).getValue();
try {
// Borrow a pooled connection; it is returned in the finally block.
con = (Connection) resource.checkOut();
pstmt = con.prepareStatement(logInsertStmt);
// Store the literal string "null" when no stage name is given.
String pstageName = (stage != null) ? stage : "null";
String additional = "";
// For status updates the payload is the status text itself.
if (operation.equals(JournalOperation.updateStatus)) {
additional = (String) data;
}
//getLog().debug("[GenericSqlJournal.log] Writing step: "+step);
// Bind everything except parameter 2 (the step), which is chosen
// inside the critical section below.
pstmt.setLong(1, id);
pstmt.setLong(3, new java.util.Date().getTime());
pstmt.setString(4, operation.toString());
pstmt.setString(5, other);
pstmt.setString(6, pstageName);
pstmt.setString(7, additional);
long step ;
// without synchronized it is likely to get the same value
// from getNextStep several times in multiple threads
// NOTE(review): locking on this.getClass() gives each subclass a
// different lock object, and the lock is JVM-local — it cannot
// serialize writers in other JVMs sharing the same table. Confirm
// both are acceptable; a private static final lock object would be
// the conventional choice.
synchronized ( this.getClass() ) {
step = getNextStep(con, id);
pstmt.setLong(2, step);
pstmt.executeUpdate();
}
// Document updates additionally persist the document delta for this step.
if (operation.equals(JournalOperation.updateDocument)) {
writeDelta(con, id, step,
(com.babeldoc.core.pipeline.PipelineDocument) data);
}
} catch (Exception se) {
// Best-effort journaling: failures are logged, not propagated,
// even though the signature declares JournalException.
LogService.getInstance().logError(se);
} finally {
// Always close the statement and return the connection to the pool.
try {
if (pstmt != null) {
pstmt.close();
}
resource.checkIn(con);
} catch (Exception se) {
LogService.getInstance().logError(I18n.get("sql.403"), se);
}
}
}
}
|