You can subscribe to this list here.
2002 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
|
Sep
|
Oct
(22) |
Nov
(308) |
Dec
(131) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
2003 |
Jan
(369) |
Feb
(171) |
Mar
(236) |
Apr
(187) |
May
(218) |
Jun
(217) |
Jul
(127) |
Aug
(448) |
Sep
(270) |
Oct
(231) |
Nov
(422) |
Dec
(255) |
2004 |
Jan
(111) |
Feb
(73) |
Mar
(338) |
Apr
(351) |
May
(349) |
Jun
(495) |
Jul
(394) |
Aug
(1048) |
Sep
(499) |
Oct
(142) |
Nov
(269) |
Dec
(638) |
2005 |
Jan
(825) |
Feb
(1272) |
Mar
(593) |
Apr
(690) |
May
(950) |
Jun
(958) |
Jul
(767) |
Aug
(839) |
Sep
(525) |
Oct
(449) |
Nov
(585) |
Dec
(455) |
2006 |
Jan
(603) |
Feb
(656) |
Mar
(195) |
Apr
(114) |
May
(136) |
Jun
(100) |
Jul
(128) |
Aug
(68) |
Sep
(7) |
Oct
(1) |
Nov
(1) |
Dec
(8) |
2007 |
Jan
(4) |
Feb
(3) |
Mar
(8) |
Apr
(16) |
May
(5) |
Jun
(4) |
Jul
(6) |
Aug
(23) |
Sep
(15) |
Oct
(5) |
Nov
(7) |
Dec
(5) |
2008 |
Jan
(5) |
Feb
(1) |
Mar
(1) |
Apr
(5) |
May
(1) |
Jun
(1) |
Jul
|
Aug
|
Sep
|
Oct
|
Nov
(1) |
Dec
|
2009 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
(1) |
Aug
|
Sep
|
Oct
|
Nov
(1) |
Dec
|
2011 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
|
Sep
|
Oct
|
Nov
|
Dec
(1) |
2012 |
Jan
|
Feb
|
Mar
|
Apr
(1) |
May
|
Jun
(1) |
Jul
(1) |
Aug
(1) |
Sep
|
Oct
(2) |
Nov
(3) |
Dec
(2) |
2013 |
Jan
(1) |
Feb
|
Mar
(2) |
Apr
(1) |
May
|
Jun
|
Jul
(1) |
Aug
|
Sep
|
Oct
|
Nov
|
Dec
|
2014 |
Jan
|
Feb
|
Mar
(1) |
Apr
|
May
(2) |
Jun
(1) |
Jul
|
Aug
(1) |
Sep
(1) |
Oct
|
Nov
(1) |
Dec
|
2015 |
Jan
|
Feb
|
Mar
|
Apr
(1) |
May
|
Jun
|
Jul
|
Aug
|
Sep
(1) |
Oct
|
Nov
|
Dec
|
2016 |
Jan
|
Feb
|
Mar
(1) |
Apr
|
May
(1) |
Jun
|
Jul
|
Aug
|
Sep
|
Oct
|
Nov
|
Dec
|
2017 |
Jan
(1) |
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
(1) |
Aug
|
Sep
|
Oct
|
Nov
|
Dec
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv3914/src/org/hibernate/hql/ast/exec Modified Files: AbstractStatementExecutor.java MultiTableDeleteExecutor.java MultiTableUpdateExecutor.java Log Message: HHH-1419 : bulk deletes/updates, subqueries and column qualifications Index: AbstractStatementExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/AbstractStatementExecutor.java,v retrieving revision 1.6 retrieving revision 1.7 diff -u -d -r1.6 -r1.7 --- AbstractStatementExecutor.java 20 Jan 2006 12:57:29 -0000 1.6 +++ AbstractStatementExecutor.java 10 Feb 2006 17:30:19 -0000 1.7 @@ -50,9 +50,8 @@ protected abstract Queryable[] getAffectedQueryables(); - protected String generateIdInsertSelect(Queryable persister, AST whereClause) { + protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) { Select select = new Select( getFactory().getDialect() ); - String tableAlias = "bulk_target"; SelectFragment selectFragment = new SelectFragment() .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() ); select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) ); Index: MultiTableDeleteExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/MultiTableDeleteExecutor.java,v retrieving revision 1.3 retrieving revision 1.4 diff -u -d -r1.3 -r1.4 --- MultiTableDeleteExecutor.java 21 Nov 2005 17:02:24 -0000 1.3 +++ MultiTableDeleteExecutor.java 10 Feb 2006 17:30:19 -0000 1.4 @@ -41,9 +41,10 @@ DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST(); FromElement fromElement = deleteStatement.getFromClause().getFromElement(); + String bulkTargetAlias = fromElement.getTableAlias(); this.persister = 
fromElement.getQueryable(); - this.idInsertSelect = generateIdInsertSelect( persister, ( ( DeleteStatement ) walker.getAST() ).getWhereClause() ); + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() ); log.trace( "Generated ID-INSERT-SELECT SQL (multi-table delete) : " + idInsertSelect ); String[] tableNames = persister.getConstraintOrderedTableNameClosure(); Index: MultiTableUpdateExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/MultiTableUpdateExecutor.java,v retrieving revision 1.4 retrieving revision 1.5 diff -u -d -r1.4 -r1.5 --- MultiTableUpdateExecutor.java 21 Nov 2005 17:02:24 -0000 1.4 +++ MultiTableUpdateExecutor.java 10 Feb 2006 17:30:19 -0000 1.5 @@ -45,9 +45,10 @@ UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST(); FromElement fromElement = updateStatement.getFromClause().getFromElement(); + String bulkTargetAlias = fromElement.getTableAlias(); this.persister = fromElement.getQueryable(); - this.idInsertSelect = generateIdInsertSelect( persister, updateStatement.getWhereClause() ); + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() ); log.trace( "Generated ID-INSERT-SELECT SQL (multi-table update) : " + idInsertSelect ); String[] tableNames = persister.getConstraintOrderedTableNameClosure(); |
From: <ste...@us...> - 2006-02-10 17:30:27
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/tree In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv3914/src/org/hibernate/hql/ast/tree Modified Files: FromElementType.java Log Message: HHH-1419 : bulk deletes/updates, subqueries and column qualifications Index: FromElementType.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/tree/FromElementType.java,v retrieving revision 1.5 retrieving revision 1.6 diff -u -d -r1.5 -r1.6 --- FromElementType.java 13 Jan 2006 04:02:10 -0000 1.5 +++ FromElementType.java 10 Feb 2006 17:30:19 -0000 1.6 @@ -311,6 +311,7 @@ // decide if we need to use table-alias qualification boolean useTableAlias = fromElement.getWalker().getStatementType() == HqlSqlTokenTypes.SELECT || fromElement.getWalker().getCurrentClauseType() == HqlSqlTokenTypes.SELECT + || fromElement.getWalker().isSubQuery() || forceAlias; if ( useTableAlias ) { return propertyMapping.toColumns( tableAlias, path ); |
From: <ste...@us...> - 2006-02-10 17:29:35
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/hql In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv3359/test/org/hibernate/test/hql Modified Files: Tag: Branch_3_1 BulkManipulationTest.java Log Message: HHH-1419 : bulk deletes/updates, subqueries and column qualifications Index: BulkManipulationTest.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/hql/BulkManipulationTest.java,v retrieving revision 1.50 retrieving revision 1.50.2.1 diff -u -d -r1.50 -r1.50.2.1 --- BulkManipulationTest.java 18 Jan 2006 06:33:26 -0000 1.50 +++ BulkManipulationTest.java 10 Feb 2006 17:29:26 -0000 1.50.2.1 @@ -3,6 +3,7 @@ import java.util.Date; import java.util.List; +import java.util.ArrayList; import junit.framework.Test; import junit.framework.TestSuite; @@ -373,6 +374,37 @@ // UPDATES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + public void testUpdateWithWhereExistsSubquery() { + Session s = openSession(); + Transaction t = s.beginTransaction(); + Human joe = new Human(); + joe.setName( new Name( "Joe", 'Q', "Public" ) ); + s.save( joe ); + Human doll = new Human(); + doll.setName( new Name( "Kyu", 'P', "Doll" ) ); + doll.setFriends( new ArrayList() ); + doll.getFriends().add( joe ); + s.save( doll ); + t.commit(); + s.close(); + + s = openSession(); + t = s.beginTransaction(); + String updateQryString = "update Human h " + + "set h.description = 'updated' " + + "where exists (" + + " select f.id " + + " from h.friends f " + + " where f.name.last = 'Public' " + + ")"; + int count = s.createQuery( updateQryString ).executeUpdate(); + assertEquals( 1, count ); + s.delete( doll ); + s.delete( joe ); + t.commit(); + s.close(); + } + public void testIncrementCounterVersion() { Session s = openSession(); Transaction t = s.beginTransaction(); |
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv2993/src/org/hibernate/hql/ast/exec Modified Files: Tag: Branch_3_1 AbstractStatementExecutor.java MultiTableDeleteExecutor.java MultiTableUpdateExecutor.java Log Message: HHH-1419 : bulk deletes/updates, subqueries and column qualifications Index: AbstractStatementExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/AbstractStatementExecutor.java,v retrieving revision 1.6 retrieving revision 1.6.2.1 diff -u -d -r1.6 -r1.6.2.1 --- AbstractStatementExecutor.java 20 Jan 2006 12:57:29 -0000 1.6 +++ AbstractStatementExecutor.java 10 Feb 2006 17:28:40 -0000 1.6.2.1 @@ -50,9 +50,8 @@ protected abstract Queryable[] getAffectedQueryables(); - protected String generateIdInsertSelect(Queryable persister, AST whereClause) { + protected String generateIdInsertSelect(Queryable persister, String tableAlias, AST whereClause) { Select select = new Select( getFactory().getDialect() ); - String tableAlias = "bulk_target"; SelectFragment selectFragment = new SelectFragment() .addColumns( tableAlias, persister.getIdentifierColumnNames(), persister.getIdentifierColumnNames() ); select.setSelectClause( selectFragment.toFragmentString().substring( 2 ) ); Index: MultiTableDeleteExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/MultiTableDeleteExecutor.java,v retrieving revision 1.3 retrieving revision 1.3.2.1 diff -u -d -r1.3 -r1.3.2.1 --- MultiTableDeleteExecutor.java 21 Nov 2005 17:02:24 -0000 1.3 +++ MultiTableDeleteExecutor.java 10 Feb 2006 17:28:41 -0000 1.3.2.1 @@ -41,9 +41,10 @@ DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST(); FromElement fromElement = deleteStatement.getFromClause().getFromElement(); + String bulkTargetAlias = 
fromElement.getTableAlias(); this.persister = fromElement.getQueryable(); - this.idInsertSelect = generateIdInsertSelect( persister, ( ( DeleteStatement ) walker.getAST() ).getWhereClause() ); + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, deleteStatement.getWhereClause() ); log.trace( "Generated ID-INSERT-SELECT SQL (multi-table delete) : " + idInsertSelect ); String[] tableNames = persister.getConstraintOrderedTableNameClosure(); Index: MultiTableUpdateExecutor.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/exec/MultiTableUpdateExecutor.java,v retrieving revision 1.4 retrieving revision 1.4.2.1 diff -u -d -r1.4 -r1.4.2.1 --- MultiTableUpdateExecutor.java 21 Nov 2005 17:02:24 -0000 1.4 +++ MultiTableUpdateExecutor.java 10 Feb 2006 17:28:41 -0000 1.4.2.1 @@ -45,9 +45,10 @@ UpdateStatement updateStatement = ( UpdateStatement ) walker.getAST(); FromElement fromElement = updateStatement.getFromClause().getFromElement(); + String bulkTargetAlias = fromElement.getTableAlias(); this.persister = fromElement.getQueryable(); - this.idInsertSelect = generateIdInsertSelect( persister, updateStatement.getWhereClause() ); + this.idInsertSelect = generateIdInsertSelect( persister, bulkTargetAlias, updateStatement.getWhereClause() ); log.trace( "Generated ID-INSERT-SELECT SQL (multi-table update) : " + idInsertSelect ); String[] tableNames = persister.getConstraintOrderedTableNameClosure(); |
From: <ste...@us...> - 2006-02-10 17:28:50
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/tree In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv2993/src/org/hibernate/hql/ast/tree Modified Files: Tag: Branch_3_1 FromElementType.java Log Message: HHH-1419 : bulk deletes/updates, subqueries and column qualifications Index: FromElementType.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/hql/ast/tree/FromElementType.java,v retrieving revision 1.5 retrieving revision 1.5.2.1 diff -u -d -r1.5 -r1.5.2.1 --- FromElementType.java 13 Jan 2006 04:02:10 -0000 1.5 +++ FromElementType.java 10 Feb 2006 17:28:42 -0000 1.5.2.1 @@ -311,6 +311,7 @@ // decide if we need to use table-alias qualification boolean useTableAlias = fromElement.getWalker().getStatementType() == HqlSqlTokenTypes.SELECT || fromElement.getWalker().getCurrentClauseType() == HqlSqlTokenTypes.SELECT + || fromElement.getWalker().isSubQuery() || forceAlias; if ( useTableAlias ) { return propertyMapping.toColumns( tableAlias, path ); |
From: <epb...@us...> - 2006-02-10 16:33:12
|
Update of /cvsroot/hibernate/HibernateExt/metadata/src/java/org/hibernate/cfg In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7960/metadata/src/java/org/hibernate/cfg Modified Files: AnnotationBinder.java AnnotationConfiguration.java Log Message: ANN-234 validation has to be dones after the 2nd pass Index: AnnotationBinder.java =================================================================== RCS file: /cvsroot/hibernate/HibernateExt/metadata/src/java/org/hibernate/cfg/AnnotationBinder.java,v retrieving revision 1.171 retrieving revision 1.172 diff -u -d -r1.171 -r1.172 --- AnnotationBinder.java 26 Jan 2006 15:02:55 -0000 1.171 +++ AnnotationBinder.java 10 Feb 2006 16:32:55 -0000 1.172 @@ -13,6 +13,7 @@ import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.DiscriminatorType; +import javax.persistence.DiscriminatorValue; import javax.persistence.Embeddable; import javax.persistence.Embedded; import javax.persistence.EmbeddedId; @@ -42,7 +43,6 @@ import javax.persistence.SqlResultSetMapping; import javax.persistence.TableGenerator; import javax.persistence.Version; -import javax.persistence.DiscriminatorValue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -113,7 +113,6 @@ import org.hibernate.type.ForeignKeyDirection; import org.hibernate.type.TypeFactory; import org.hibernate.util.StringHelper; -import org.hibernate.validator.ClassValidator; /** * JSR 175 annotation binder @@ -666,9 +665,6 @@ entityBinder.addIndexes( annotatedClass.getAnnotation( org.hibernate.annotations.Table.class ) ); entityBinder.addIndexes( annotatedClass.getAnnotation( org.hibernate.annotations.Tables.class ) ); - //integrate the validate framework - // TODO: migrate the Validator to the X layer - new ClassValidator( clazzToProcess.toClass() ).apply( persistentClass ); } /** Index: AnnotationConfiguration.java =================================================================== RCS file: 
/cvsroot/hibernate/HibernateExt/metadata/src/java/org/hibernate/cfg/AnnotationConfiguration.java,v retrieving revision 1.48 retrieving revision 1.49 diff -u -d -r1.48 -r1.49 --- AnnotationConfiguration.java 25 Jan 2006 21:48:21 -0000 1.48 +++ AnnotationConfiguration.java 10 Feb 2006 16:32:55 -0000 1.49 @@ -12,6 +12,7 @@ import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; +import java.util.Collection; import javax.persistence.MappedSuperclass; import javax.persistence.Entity; @@ -22,14 +23,17 @@ import org.dom4j.Element; import org.hibernate.AnnotationException; import org.hibernate.MappingException; +import org.hibernate.validator.ClassValidator; import org.hibernate.mapping.Column; import org.hibernate.mapping.Join; import org.hibernate.mapping.Table; import org.hibernate.mapping.UniqueKey; +import org.hibernate.mapping.PersistentClass; import org.hibernate.reflection.ReflectionManager; import org.hibernate.reflection.XClass; import org.hibernate.util.JoinedIterator; import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; /** * Add JSR 175 configuration capability. 
@@ -107,7 +111,7 @@ * @return the configuration object */ public AnnotationConfiguration addAnnotatedClass(Class persistentClass) throws MappingException { - XClass persistentXClass = ReflectionManager.INSTANCE.toXClass( persistentClass ); + XClass persistentXClass = ReflectionManager.INSTANCE.toXClass( persistentClass ); try { if ( persistentXClass.isAnnotationPresent( Entity.class ) ) { annotatedClassEntities.put( persistentXClass.getName(), persistentXClass ); @@ -255,6 +259,19 @@ buildUniqueKeyFromColumnNames( columnNames, table, keyName ); } } + for (PersistentClass persistentClazz : ( Collection < PersistentClass > ) classes.values() ) { + //integrate the validate framework + // TODO: migrate the Validator to the X layer + String className = persistentClazz.getClassName(); + if ( StringHelper.isNotEmpty( className ) ) { + try { + new ClassValidator( ReflectHelper.classForName( className ) ).apply( persistentClazz ); + } + catch (ClassNotFoundException e) { + //swallow them + } + } + } } private void processArtifactsOfType(String artifact) { @@ -379,7 +396,7 @@ ncdf ); } - + addAnnotatedClass( loadedClass ); } else { |
From: <epb...@us...> - 2006-02-10 16:33:07
|
Update of /cvsroot/hibernate/HibernateExt/metadata/src/test/org/hibernate/validator/test In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7960/metadata/src/test/org/hibernate/validator/test Modified Files: HibernateAnnotationIntegrationTest.java Added Files: TvOwner.java Log Message: ANN-234 validation has to be dones after the 2nd pass --- NEW FILE: TvOwner.java --- //$Id: TvOwner.java,v 1.1 2006/02/10 16:32:56 epbernard Exp $ package org.hibernate.validator.test; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.ManyToOne; import org.hibernate.validator.NotNull; /** * @author Emmanuel Bernard */ @Entity public class TvOwner { @Id @GeneratedValue public Integer id; @ManyToOne @NotNull public Tv tv; } Index: HibernateAnnotationIntegrationTest.java =================================================================== RCS file: /cvsroot/hibernate/HibernateExt/metadata/src/test/org/hibernate/validator/test/HibernateAnnotationIntegrationTest.java,v retrieving revision 1.9 retrieving revision 1.10 diff -u -d -r1.9 -r1.10 --- HibernateAnnotationIntegrationTest.java 12 Jan 2006 02:54:50 -0000 1.9 +++ HibernateAnnotationIntegrationTest.java 10 Feb 2006 16:32:56 -0000 1.10 @@ -33,7 +33,13 @@ public void testApplyOnIdColumn() throws Exception { PersistentClass classMapping = getCfg().getClassMapping( Tv.class.getName() ); Column serialColumn = (Column) classMapping.getIdentifierProperty().getColumnIterator().next(); - assertEquals( serialColumn.getLength(), 2 ); + assertEquals( "Vaidator annotation not applied on ids", 2, serialColumn.getLength() ); + } + + public void testApplyOnManyToOne() throws Exception { + PersistentClass classMapping = getCfg().getClassMapping( TvOwner.class.getName() ); + Column serialColumn = (Column) classMapping.getProperty("tv").getColumnIterator().next(); + assertEquals( "Validator annotations not applied on associations", false, serialColumn.isNullable() ); } 
public void testEvents() throws Exception { @@ -144,7 +150,8 @@ Address.class, Martian.class, Venusian.class, - Tv.class + Tv.class, + TvOwner.class }; } |
From: <epb...@us...> - 2006-02-10 13:07:23
|
Update of /cvsroot/hibernate/HibernateExt/ejb/src/test/org/hibernate/ejb/test/ops In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv14290/ejb/src/test/org/hibernate/ejb/test/ops Added Files: MergeNewTest.java Workload.java Log Message: more tests on merge --- NEW FILE: MergeNewTest.java --- //$Id: MergeNewTest.java,v 1.1 2006/02/10 13:07:15 epbernard Exp $ package org.hibernate.ejb.test.ops; import javax.persistence.EntityManager; import org.hibernate.ejb.test.TestCase; /** * @author Emmanuel Bernard */ public class MergeNewTest extends TestCase { public void testMergeNew() throws Exception { Workload load = new Workload(); load.name = "Cleaning"; load.load = 10; EntityManager em = factory.createEntityManager(); em.getTransaction().begin(); load = em.merge( load ); assertNotNull( load.id ); em.flush(); assertNotNull( load.id ); em.getTransaction().rollback(); em.close(); } public Class[] getAnnotatedClasses() { return new Class[] { Workload.class }; } } --- NEW FILE: Workload.java --- //$Id: Workload.java,v 1.1 2006/02/10 13:07:15 epbernard Exp $ package org.hibernate.ejb.test.ops; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.GeneratedValue; /** * @author Emmanuel Bernard */ @Entity public class Workload { @Id @GeneratedValue public Integer id; public String name; public Integer load; } |
From: <max...@us...> - 2006-02-10 08:03:57
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/engine/query In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv674/src/org/hibernate/engine/query Modified Files: Tag: Branch_3_1 NativeSQLQueryPlan.java Log Message: imports Index: NativeSQLQueryPlan.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/engine/query/NativeSQLQueryPlan.java,v retrieving revision 1.2 retrieving revision 1.2.2.1 diff -u -d -r1.2 -r1.2.2.1 --- NativeSQLQueryPlan.java 7 Nov 2005 17:39:09 -0000 1.2 +++ NativeSQLQueryPlan.java 10 Feb 2006 08:03:49 -0000 1.2.2.1 @@ -1,11 +1,10 @@ package org.hibernate.engine.query; -import org.hibernate.loader.custom.SQLCustomQuery; -import org.hibernate.engine.SessionFactoryImplementor; -import org.hibernate.engine.NamedSQLQueryDefinition; - import java.io.Serializable; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.loader.custom.SQLCustomQuery; + /** * Defines a query execution plan for a native-SQL query. * |
From: <max...@us...> - 2006-02-10 08:03:57
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/engine In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv674/src/org/hibernate/engine Modified Files: Tag: Branch_3_1 EntityKey.java NamedSQLQueryDefinition.java Log Message: imports Index: EntityKey.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/engine/EntityKey.java,v retrieving revision 1.12 retrieving revision 1.12.2.1 diff -u -d -r1.12 -r1.12.2.1 --- EntityKey.java 1 Feb 2006 19:58:27 -0000 1.12 +++ EntityKey.java 10 Feb 2006 08:03:49 -0000 1.12.2.1 @@ -1,14 +1,13 @@ //$Id$ package org.hibernate.engine; -import java.io.Serializable; -import java.io.ObjectOutputStream; import java.io.IOException; import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; import org.hibernate.AssertionFailure; import org.hibernate.EntityMode; -import org.hibernate.util.SerializationHelper; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.pretty.MessageHelper; import org.hibernate.type.Type; Index: NamedSQLQueryDefinition.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/engine/NamedSQLQueryDefinition.java,v retrieving revision 1.18 retrieving revision 1.18.2.1 diff -u -d -r1.18 -r1.18.2.1 --- NamedSQLQueryDefinition.java 7 Nov 2005 17:39:09 -0000 1.18 +++ NamedSQLQueryDefinition.java 10 Feb 2006 08:03:49 -0000 1.18.2.1 @@ -4,9 +4,8 @@ import java.util.List; import java.util.Map; -import org.hibernate.FlushMode; import org.hibernate.CacheMode; -import org.hibernate.engine.query.NativeSQLQuerySpecification; +import org.hibernate.FlushMode; import org.hibernate.loader.custom.SQLQueryReturn; import org.hibernate.loader.custom.SQLQueryScalarReturn; |
From: <ste...@us...> - 2006-02-10 03:50:00
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv13047/src/org/hibernate/tool/hbm2ddl Modified Files: Tag: Branch_3_1 SchemaExport.java SchemaUpdate.java SchemaValidator.java Added Files: Tag: Branch_3_1 ConnectionHelper.java ManagedProviderConnectionHelper.java SuppliedConnectionHelper.java SuppliedConnectionProviderConnectionHelper.java Log Message: HHH-1445 : unified connection handling amongst core tools --- NEW FILE: ConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import java.sql.Connection; import java.sql.SQLException; /** * Contract for delegates responsible for managing connection used by the * hbm2ddl tools. * * @author Steve Ebersole */ interface ConnectionHelper { /** * Prepare the helper for use. * * @param needsAutoCommit Should connection be forced to auto-commit * if not already. * @throws SQLException */ public void prepare(boolean needsAutoCommit) throws SQLException; /** * Get a reference to the connection we are using. * * @return The JDBC connection. * @throws SQLException */ public Connection getConnection() throws SQLException; /** * Release any resources held by this helper. * * @throws SQLException */ public void release() throws SQLException; } --- NEW FILE: ManagedProviderConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.connection.ConnectionProvider; import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.util.JDBCExceptionReporter; import java.util.Properties; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on an internally * built and managed {@link ConnectionProvider}. 
* * @author Steve Ebersole */ class ManagedProviderConnectionHelper implements ConnectionHelper { private Properties cfgProperties; private ConnectionProvider connectionProvider; private Connection connection; public ManagedProviderConnectionHelper(Properties cfgProperties) { this.cfgProperties = cfgProperties; } public void prepare(boolean needsAutoCommit) throws SQLException { connectionProvider = ConnectionProviderFactory.newConnectionProvider( cfgProperties ); connection = connectionProvider.getConnection(); if ( needsAutoCommit && !connection.getAutoCommit() ) { connection.commit(); connection.setAutoCommit( true ); } } public Connection getConnection() throws SQLException { return connection; } public void release() throws SQLException { if ( connection != null ) { try { JDBCExceptionReporter.logAndClearWarnings( connection ); connectionProvider.closeConnection( connection ); } finally { connectionProvider.close(); } } connection = null; } } --- NEW FILE: SuppliedConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.util.JDBCExceptionReporter; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on an explicitly supplied * connection. 
* * @author Steve Ebersole */ class SuppliedConnectionHelper implements ConnectionHelper { private Connection connection; private boolean toggleAutoCommit; public SuppliedConnectionHelper(Connection connection) { this.connection = connection; } public void prepare(boolean needsAutoCommit) throws SQLException { toggleAutoCommit = needsAutoCommit && !connection.getAutoCommit(); if ( toggleAutoCommit ) { try { connection.commit(); } catch( Throwable ignore ) { // might happen with a managed connection } connection.setAutoCommit( true ); } } public Connection getConnection() { return connection; } public void release() throws SQLException { JDBCExceptionReporter.logAndClearWarnings( connection ); if ( toggleAutoCommit ) { connection.setAutoCommit( false ); } connection = null; } } --- NEW FILE: SuppliedConnectionProviderConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.connection.ConnectionProvider; import org.hibernate.util.JDBCExceptionReporter; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on a provided * {@link ConnectionProvider}. Essentially, ensures that the connection * gets cleaned up, but that the provider itself remains usable since it * was externally provided to us. 
* * @author Steve Ebersole */ class SuppliedConnectionProviderConnectionHelper implements ConnectionHelper { private ConnectionProvider provider; private Connection connection; private boolean toggleAutoCommit; public SuppliedConnectionProviderConnectionHelper(ConnectionProvider provider) { this.provider = provider; } public void prepare(boolean needsAutoCommit) throws SQLException { connection = provider.getConnection(); toggleAutoCommit = needsAutoCommit && !connection.getAutoCommit(); if ( toggleAutoCommit ) { try { connection.commit(); } catch( Throwable ignore ) { // might happen with a managed connection } connection.setAutoCommit( true ); } } public Connection getConnection() throws SQLException { return connection; } public void release() throws SQLException { // we only release the connection if ( connection != null ) { JDBCExceptionReporter.logAndClearWarnings( connection ); if ( toggleAutoCommit ) { connection.setAutoCommit( false ); } provider.closeConnection( connection ); connection = null; } } } Index: SchemaExport.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaExport.java,v retrieving revision 1.23 retrieving revision 1.23.2.1 diff -u -d -r1.23 -r1.23.2.1 --- SchemaExport.java 30 Oct 2005 10:59:20 -0000 1.23 +++ SchemaExport.java 10 Feb 2006 03:49:52 -0000 1.23.2.1 @@ -14,10 +14,8 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; import java.util.Properties; -import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -27,12 +25,9 @@ import org.hibernate.cfg.Environment; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import 
org.hibernate.pretty.DDLFormatter; import org.hibernate.util.ConfigHelper; -import org.hibernate.util.JDBCExceptionReporter; import org.hibernate.util.PropertiesHelper; import org.hibernate.util.ReflectHelper; @@ -44,7 +39,7 @@ */ public class SchemaExport { - private static final Log log = LogFactory.getLog(SchemaExport.class); + private static final Log log = LogFactory.getLog( SchemaExport.class ); private ConnectionHelper connectionHelper; private String[] dropSQL; @@ -53,9 +48,9 @@ private String importFile = "/import.sql"; private Dialect dialect; private String delimiter; - private final List exceptions = new ArrayList(); - private boolean haltOnError = false; - private boolean format = true; + private final List exceptions = new ArrayList(); + private boolean haltOnError = false; + private boolean format = true; /** * Create a schema exporter for the given Configuration @@ -70,9 +65,9 @@ */ public SchemaExport(Configuration cfg, Settings settings) throws HibernateException { dialect = settings.getDialect(); - connectionHelper = new SuppliedConnectionProviderConnectionHelper( - settings.getConnectionProvider() - ); + connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); dropSQL = cfg.generateDropSchemaScript( dialect ); createSQL = cfg.generateSchemaCreationScript( dialect ); format = settings.isFormatSqlEnabled(); @@ -81,21 +76,21 @@ /** * Create a schema exporter for the given Configuration, with the given * database connection properties. 
- * + * * @deprecated properties may be specified via the Configuration object */ - public SchemaExport(Configuration cfg, Properties properties) - throws HibernateException { + public SchemaExport(Configuration cfg, Properties properties) + throws HibernateException { dialect = Dialect.getDialect( properties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); props.putAll( properties ); - connectionHelper = new ProviderConnectionHelper( props ); + connectionHelper = new ManagedProviderConnectionHelper( props ); dropSQL = cfg.generateDropSchemaScript( dialect ); createSQL = cfg.generateSchemaCreationScript( dialect ); - format = PropertiesHelper.getBoolean(Environment.FORMAT_SQL, props); + format = PropertiesHelper.getBoolean( Environment.FORMAT_SQL, props ); } public SchemaExport(Configuration cfg, Connection connection) { @@ -128,174 +123,188 @@ /** * Run the schema creation script. + * * @param script print the DDL to the console * @param export export the script to the database */ public void create(boolean script, boolean export) { - execute(script, export, false, false); + execute( script, export, false, false ); } /** * Run the drop schema script. + * * @param script print the DDL to the console * @param export export the script to the database */ public void drop(boolean script, boolean export) { - execute(script, export, true, false); + execute( script, export, true, false ); } - + private String format(String sql) { - return format ? - new DDLFormatter(sql).format() : - sql; + return format ? 
+ new DDLFormatter( sql ).format() : + sql; } public void execute(boolean script, boolean export, boolean justDrop, boolean justCreate) { - log.info("Running hbm2ddl schema export"); + log.info( "Running hbm2ddl schema export" ); Connection connection = null; Writer outputFileWriter = null; Reader importFileReader = null; Statement statement = null; - exceptions.clear(); + exceptions.clear(); try { - - try - { - InputStream stream = ConfigHelper.getResourceAsStream(importFile); - importFileReader = new InputStreamReader(stream); + + try { + InputStream stream = ConfigHelper.getResourceAsStream( importFile ); + importFileReader = new InputStreamReader( stream ); } - catch (HibernateException e) - { - log.debug("import file not found: " + importFile); + catch ( HibernateException e ) { + log.debug( "import file not found: " + importFile ); } - if (outputFile != null) { - log.info("writing generated schema to file: " + outputFile); - outputFileWriter = new FileWriter(outputFile); + if ( outputFile != null ) { + log.info( "writing generated schema to file: " + outputFile ); + outputFileWriter = new FileWriter( outputFile ); } - if (export) { - log.info("exporting generated schema to database"); + if ( export ) { + log.info( "exporting generated schema to database" ); + connectionHelper.prepare( true ); connection = connectionHelper.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit(true); - } statement = connection.createStatement(); } - - if (!justCreate) { + + if ( !justCreate ) { drop( script, export, outputFileWriter, statement ); } - if (!justDrop) { + if ( !justDrop ) { create( script, export, outputFileWriter, statement ); - if (export && importFileReader!=null) { - importScript(importFileReader, statement); + if ( export && importFileReader != null ) { + importScript( importFileReader, statement ); } } - log.info("schema export complete"); + log.info( "schema export complete" ); } - catch(Exception e) { - 
exceptions.add(e); - log.error("schema export unsuccessful", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "schema export unsuccessful", e ); } finally { try { - if (statement!=null) statement.close(); - if (connection!=null) connectionHelper.release(); + if ( statement != null ) { + statement.close(); + } + if ( connection != null ) { + connectionHelper.release(); + } } - catch(Exception e) { - exceptions.add(e); + catch ( Exception e ) { + exceptions.add( e ); log.error( "Could not close connection", e ); } try { - if (outputFileWriter!=null) outputFileWriter.close(); - if (importFileReader!=null) importFileReader.close(); + if ( outputFileWriter != null ) { + outputFileWriter.close(); + } + if ( importFileReader != null ) { + importFileReader.close(); + } } - catch (IOException ioe) { - exceptions.add(ioe); + catch ( IOException ioe ) { + exceptions.add( ioe ); log.error( "Error closing output file: " + outputFile, ioe ); } } } - - private void importScript(Reader importFileReader, Statement statement) + + private void importScript(Reader importFileReader, Statement statement) throws IOException { - log.info("Executing import script: " + importFile); - BufferedReader reader = new BufferedReader(importFileReader); - for ( String sql = reader.readLine(); sql!=null; sql = reader.readLine() ) { + log.info( "Executing import script: " + importFile ); + BufferedReader reader = new BufferedReader( importFileReader ); + for ( String sql = reader.readLine(); sql != null; sql = reader.readLine() ) { try { String trimmedSql = sql.trim(); - if(trimmedSql.length()==0 || - trimmedSql.startsWith("--") || - trimmedSql.startsWith("//") || - trimmedSql.startsWith("/*")) { + if ( trimmedSql.length() == 0 || + trimmedSql.startsWith( "--" ) || + trimmedSql.startsWith( "//" ) || + trimmedSql.startsWith( "/*" ) ) { continue; - } else { - if(trimmedSql.endsWith(";")) { - trimmedSql = trimmedSql.substring(0,trimmedSql.length()-1); + } + else { + if ( 
trimmedSql.endsWith( ";" ) ) { + trimmedSql = trimmedSql.substring( 0, trimmedSql.length() - 1 ); } - log.debug(trimmedSql); - statement.execute(trimmedSql); + log.debug( trimmedSql ); + statement.execute( trimmedSql ); } } - catch (SQLException e) { - throw new JDBCException("Error during import script execution", e); - } + catch ( SQLException e ) { + throw new JDBCException( "Error during import script execution", e ); + } } } - private void create(boolean script, boolean export, Writer fileOutput, Statement statement) + private void create(boolean script, boolean export, Writer fileOutput, Statement statement) throws IOException { - for (int j = 0; j < createSQL.length; j++) { + for ( int j = 0; j < createSQL.length; j++ ) { try { execute( script, export, fileOutput, statement, createSQL[j] ); } - catch (SQLException e) { - if (haltOnError) { - throw new JDBCException("Error during DDL export", e); + catch ( SQLException e ) { + if ( haltOnError ) { + throw new JDBCException( "Error during DDL export", e ); } - exceptions.add(e); + exceptions.add( e ); log.error( "Unsuccessful: " + createSQL[j] ); log.error( e.getMessage() ); } } } - private void drop(boolean script, boolean export, Writer fileOutput, Statement statement) + private void drop(boolean script, boolean export, Writer fileOutput, Statement statement) throws IOException { - for (int i = 0; i < dropSQL.length; i++) { + for ( int i = 0; i < dropSQL.length; i++ ) { try { execute( script, export, fileOutput, statement, dropSQL[i] ); } - catch (SQLException e) { - exceptions.add(e); + catch ( SQLException e ) { + exceptions.add( e ); log.debug( "Unsuccessful: " + dropSQL[i] ); log.debug( e.getMessage() ); } } } - private void execute(boolean script, boolean export, Writer fileOutput, Statement statement, final String sql) + private void execute(boolean script, boolean export, Writer fileOutput, Statement statement, final String sql) throws IOException, SQLException { String formatted = format( sql ); - if 
(delimiter != null) formatted += delimiter; - if (script) System.out.println(formatted); - log.debug(formatted); - if (outputFile != null) fileOutput.write( formatted + "\n" ); - if (export) statement.executeUpdate( sql ); + if ( delimiter != null ) { + formatted += delimiter; + } + if ( script ) { + System.out.println( formatted ); + } + log.debug( formatted ); + if ( outputFile != null ) { + fileOutput.write( formatted + "\n" ); + } + if ( export ) { + statement.executeUpdate( sql ); + } } public static void main(String[] args) { @@ -313,185 +322,100 @@ boolean format = false; String delim = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].equals("--quiet") ) { + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].equals( "--quiet" ) ) { script = false; } - else if( args[i].equals("--drop") ) { + else if ( args[i].equals( "--drop" ) ) { drop = true; } - else if( args[i].equals("--create") ) { + else if ( args[i].equals( "--create" ) ) { create = true; } - else if( args[i].equals("--haltonerror") ) { + else if ( args[i].equals( "--haltonerror" ) ) { halt = true; } - else if( args[i].equals("--text") ) { + else if ( args[i].equals( "--text" ) ) { export = false; } - else if( args[i].startsWith("--output=") ) { - outFile = args[i].substring(9); + else if ( args[i].startsWith( "--output=" ) ) { + outFile = args[i].substring( 9 ); } - else if( args[i].startsWith("--import=") ) { - importFile = args[i].substring(9); + else if ( args[i].startsWith( "--import=" ) ) { + importFile = args[i].substring( 9 ); } - else if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + else if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if( args[i].equals("--format") ) { + else if ( args[i].equals( "--format" ) ) { format = true; } - else if ( args[i].startsWith("--delimiter=") ) { - delim = args[i].substring(12); + else if ( 
args[i].startsWith( "--delimiter=" ) ) { + delim = args[i].substring( 12 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ) + ( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ) .newInstance() - ); + ); } } else { String filename = args[i]; if ( filename.endsWith( ".jar" ) ) { - cfg.addJar( new File(filename) ); + cfg.addJar( new File( filename ) ); } else { - cfg.addFile(filename); + cfg.addFile( filename ); } } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - SchemaExport se = new SchemaExport(cfg) - .setHaltOnError(halt) - .setOutputFile(outFile) - .setImportFile(importFile) - .setDelimiter(delim); - if (format) se.setFormat(true); - se.execute(script, export, drop, create); - + SchemaExport se = new SchemaExport( cfg ) + .setHaltOnError( halt ) + .setOutputFile( outFile ) + .setImportFile( importFile ) + .setDelimiter( delim ); + if ( format ) { + se.setFormat( true ); + } + se.execute( script, export, drop, create ); + } - catch(Exception e) { + catch ( Exception e ) { log.error( "Error creating schema ", e ); e.printStackTrace(); } } - /** - * Returns a List of all Exceptions which occured during the export. 
- * @return A List containig the Exceptions occured during the export - */ - public List getExceptions() { - return exceptions; - } - - private interface ConnectionHelper { - Connection getConnection() throws SQLException; - void release() throws SQLException; - } - - private class SuppliedConnectionHelper implements ConnectionHelper { - private Connection connection; - - public SuppliedConnectionHelper(Connection connection) { - this.connection = connection; - } - - public Connection getConnection() { - return connection; - } - - public void release() { - JDBCExceptionReporter.logAndClearWarnings(connection); - connection = null; - } + /** + * Returns a List of all Exceptions which occured during the export. + * + * @return A List containig the Exceptions occured during the export + */ + public List getExceptions() { + return exceptions; } - private class SuppliedConnectionProviderConnectionHelper implements ConnectionHelper { - private ConnectionProvider provider; - private Connection connection; - - public SuppliedConnectionProviderConnectionHelper(ConnectionProvider provider) { - this.provider = provider; - } - - public Connection getConnection() throws SQLException { - if (connection == null) { - connection = provider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit( true ); - } - } - return connection; - } - - public void release() throws SQLException { - if (connection != null) { - JDBCExceptionReporter.logAndClearWarnings(connection); - provider.closeConnection( connection ); - connection = null; - } - } - } - public SchemaExport setFormat(boolean format) { this.format = format; return this; } - private class ProviderConnectionHelper implements ConnectionHelper { - private Properties cfgProperties; - private ConnectionProvider connectionProvider; - private Connection connection; - - public ProviderConnectionHelper(Properties cfgProperties) { - this.cfgProperties = cfgProperties; - } - - public 
Connection getConnection() throws SQLException { - if ( connection == null ) { - connectionProvider = ConnectionProviderFactory.newConnectionProvider( cfgProperties ); - connection = connectionProvider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit( true ); - } - } - return connection; - } - - public void release() throws SQLException { - if ( connection!=null ) { - JDBCExceptionReporter.logAndClearWarnings(connection); - connectionProvider.closeConnection( connection ); - connectionProvider.close(); - } - connection = null; - } - } - public SchemaExport setHaltOnError(boolean haltOnError) { this.haltOnError = haltOnError; return this; } } - - - - - - - Index: SchemaUpdate.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaUpdate.java,v retrieving revision 1.5 retrieving revision 1.5.2.1 diff -u -d -r1.5 -r1.5.2.1 --- SchemaUpdate.java 11 Aug 2005 20:41:21 -0000 1.5 +++ SchemaUpdate.java 10 Feb 2006 03:49:52 -0000 1.5.2.1 @@ -15,8 +15,6 @@ import org.hibernate.cfg.Configuration; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import org.hibernate.util.ReflectHelper; @@ -28,11 +26,11 @@ */ public class SchemaUpdate { - private static final Log log = LogFactory.getLog(SchemaUpdate.class); - private ConnectionProvider connectionProvider; + private static final Log log = LogFactory.getLog( SchemaUpdate.class ); + private ConnectionHelper connectionHelper; private Configuration configuration; private Dialect dialect; - private List exceptions; + private List exceptions; public SchemaUpdate(Configuration cfg) throws HibernateException { this( cfg, cfg.getProperties() ); @@ -40,19 +38,21 @@ public SchemaUpdate(Configuration cfg, Properties 
connectionProperties) throws HibernateException { this.configuration = cfg; - dialect = Dialect.getDialect(connectionProperties); + dialect = Dialect.getDialect( connectionProperties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); - props.putAll(connectionProperties); - connectionProvider = ConnectionProviderFactory.newConnectionProvider(props); - exceptions = new ArrayList(); + props.putAll( connectionProperties ); + connectionHelper = new ManagedProviderConnectionHelper( props ); + exceptions = new ArrayList(); } public SchemaUpdate(Configuration cfg, Settings settings) throws HibernateException { this.configuration = cfg; dialect = settings.getDialect(); - connectionProvider = settings.getConnectionProvider(); - exceptions = new ArrayList(); + connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); + exceptions = new ArrayList(); } public static void main(String[] args) { @@ -64,42 +64,42 @@ boolean doUpdate = true; String propFile = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].equals("--quiet") ) { + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].equals( "--quiet" ) ) { script = false; } - else if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + else if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--text") ) { + else if ( args[i].startsWith( "--text" ) ) { doUpdate = false; } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ).newInstance() + ( NamingStrategy 
) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance() ); } } else { - cfg.addFile(args[i]); + cfg.addFile( args[i] ); } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - new SchemaUpdate(cfg).execute(script, doUpdate); + new SchemaUpdate( cfg ).execute( script, doUpdate ); } - catch (Exception e) { + catch ( Exception e ) { log.error( "Error running schema update", e ); e.printStackTrace(); } @@ -107,92 +107,85 @@ /** * Execute the schema updates + * * @param script print all DDL to the console */ public void execute(boolean script, boolean doUpdate) { - log.info("Running hbm2ddl schema update"); + log.info( "Running hbm2ddl schema update" ); - Connection connection=null; - Statement stmt=null; - boolean autoCommitWasEnabled = true; + Connection connection = null; + Statement stmt = null; - exceptions.clear(); + exceptions.clear(); try { DatabaseMetadata meta; try { - log.info("fetching database metadata"); - connection = connectionProvider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit(true); - autoCommitWasEnabled = false; - } - meta = new DatabaseMetadata(connection, dialect); + log.info( "fetching database metadata" ); + connectionHelper.prepare( true ); + connection = connectionHelper.getConnection(); + meta = new DatabaseMetadata( connection, dialect ); stmt = connection.createStatement(); } - catch (SQLException sqle) { - exceptions.add(sqle); - log.error("could not get database metadata", sqle); + catch ( SQLException sqle ) { + exceptions.add( sqle ); + log.error( "could not get database metadata", sqle ); throw sqle; } - log.info("updating schema"); + log.info( "updating schema" ); - String[] createSQL = 
configuration.generateSchemaUpdateScript(dialect, meta); - for (int j = 0; j < createSQL.length; j++) { + String[] createSQL = configuration.generateSchemaUpdateScript( dialect, meta ); + for ( int j = 0; j < createSQL.length; j++ ) { final String sql = createSQL[j]; try { - if (script) System.out.println(sql); - if (doUpdate) { - log.debug(sql); - stmt.executeUpdate(sql); + if ( script ) { + System.out.println( sql ); + } + if ( doUpdate ) { + log.debug( sql ); + stmt.executeUpdate( sql ); } } - catch (SQLException e) { - exceptions.add(e); + catch ( SQLException e ) { + exceptions.add( e ); log.error( "Unsuccessful: " + sql ); log.error( e.getMessage() ); } } - log.info("schema update complete"); + log.info( "schema update complete" ); } - catch (Exception e) { - exceptions.add(e); - log.error("could not complete schema update", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "could not complete schema update", e ); } finally { try { - if (stmt!=null) stmt.close(); - if (!autoCommitWasEnabled) connection.setAutoCommit(false); - if (connection!=null) connection.close(); - if (connectionProvider!=null) connectionProvider.close(); + if ( stmt != null ) { + stmt.close(); + } + connectionHelper.release(); } - catch (Exception e) { - exceptions.add(e); - log.error("Error closing connection", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "Error closing connection", e ); } } } - /** - * Returns a List of all Exceptions which occured during the export. - * @return A List containig the Exceptions occured during the export - */ - public List getExceptions() { - return exceptions; - } + /** + * Returns a List of all Exceptions which occured during the export. 
+ * + * @return A List containig the Exceptions occured during the export + */ + public List getExceptions() { + return exceptions; + } } - - - - - - Index: SchemaValidator.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaValidator.java,v retrieving revision 1.3 retrieving revision 1.3.2.1 diff -u -d -r1.3 -r1.3.2.1 --- SchemaValidator.java 7 Oct 2005 08:35:44 -0000 1.3 +++ SchemaValidator.java 10 Feb 2006 03:49:52 -0000 1.3.2.1 @@ -12,8 +12,6 @@ import org.hibernate.cfg.Configuration; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import org.hibernate.util.ReflectHelper; @@ -25,8 +23,8 @@ */ public class SchemaValidator { - private static final Log log = LogFactory.getLog(SchemaValidator.class); - private ConnectionProvider connectionProvider; + private static final Log log = LogFactory.getLog( SchemaValidator.class ); + private ConnectionHelper connectionHelper; private Configuration configuration; private Dialect dialect; @@ -36,17 +34,19 @@ public SchemaValidator(Configuration cfg, Properties connectionProperties) throws HibernateException { this.configuration = cfg; - dialect = Dialect.getDialect(connectionProperties); + dialect = Dialect.getDialect( connectionProperties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); - props.putAll(connectionProperties); - connectionProvider = ConnectionProviderFactory.newConnectionProvider(props); + props.putAll( connectionProperties ); + connectionHelper = new ManagedProviderConnectionHelper( props ); } public SchemaValidator(Configuration cfg, Settings settings) throws HibernateException { this.configuration = cfg; dialect = settings.getDialect(); - connectionProvider = settings.getConnectionProvider(); + 
connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); } public static void main(String[] args) { @@ -55,87 +55,80 @@ String propFile = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ).newInstance() + ( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance() ); } } else { - cfg.addFile(args[i]); + cfg.addFile( args[i] ); } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - - new SchemaValidator(cfg).validate(); + + new SchemaValidator( cfg ).validate(); } - catch (Exception e) { + catch ( Exception e ) { log.error( "Error running schema update", e ); e.printStackTrace(); } } /** - * Execute the schema updates - * @param script print all DDL to the console + * Perform the validations. 
*/ public void validate() { - log.info("Running schema validator"); + log.info( "Running schema validator" ); - Connection connection=null; + Connection connection = null; try { DatabaseMetadata meta; try { - log.info("fetching database metadata"); - connection = connectionProvider.getConnection(); - meta = new DatabaseMetadata(connection, dialect, false); + log.info( "fetching database metadata" ); + connectionHelper.prepare( false ); + connection = connectionHelper.getConnection(); + meta = new DatabaseMetadata( connection, dialect, false ); } - catch (SQLException sqle) { - log.error("could not get database metadata", sqle); + catch ( SQLException sqle ) { + log.error( "could not get database metadata", sqle ); throw sqle; } - - configuration.validateSchema(dialect, meta); + + configuration.validateSchema( dialect, meta ); } - catch (SQLException e) { - log.error("could not complete schema validation", e); + catch ( SQLException e ) { + log.error( "could not complete schema validation", e ); } finally { try { - if (connection!=null) connection.close(); - if (connectionProvider!=null) connectionProvider.close(); + connectionHelper.release(); } - catch (Exception e) { - log.error("Error closing connection", e); + catch ( Exception e ) { + log.error( "Error closing connection", e ); } } } } - - - - - - |
From: <ste...@us...> - 2006-02-10 03:48:47
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv12646/src/org/hibernate/tool/hbm2ddl Modified Files: SchemaExport.java SchemaUpdate.java SchemaValidator.java Added Files: ConnectionHelper.java ManagedProviderConnectionHelper.java SuppliedConnectionHelper.java SuppliedConnectionProviderConnectionHelper.java Log Message: HHH-1445 : unified connection handling amongst core tools --- NEW FILE: ConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import java.sql.Connection; import java.sql.SQLException; /** * Contract for delegates responsible for managing connection used by the * hbm2ddl tools. * * @author Steve Ebersole */ interface ConnectionHelper { /** * Prepare the helper for use. * * @param needsAutoCommit Should connection be forced to auto-commit * if not already. * @throws SQLException */ public void prepare(boolean needsAutoCommit) throws SQLException; /** * Get a reference to the connection we are using. * * @return The JDBC connection. * @throws SQLException */ public Connection getConnection() throws SQLException; /** * Release any resources held by this helper. * * @throws SQLException */ public void release() throws SQLException; } --- NEW FILE: ManagedProviderConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.connection.ConnectionProvider; import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.util.JDBCExceptionReporter; import java.util.Properties; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on an internally * built and managed {@link ConnectionProvider}. 
* * @author Steve Ebersole */ class ManagedProviderConnectionHelper implements ConnectionHelper { private Properties cfgProperties; private ConnectionProvider connectionProvider; private Connection connection; public ManagedProviderConnectionHelper(Properties cfgProperties) { this.cfgProperties = cfgProperties; } public void prepare(boolean needsAutoCommit) throws SQLException { connectionProvider = ConnectionProviderFactory.newConnectionProvider( cfgProperties ); connection = connectionProvider.getConnection(); if ( needsAutoCommit && !connection.getAutoCommit() ) { connection.commit(); connection.setAutoCommit( true ); } } public Connection getConnection() throws SQLException { return connection; } public void release() throws SQLException { if ( connection != null ) { try { JDBCExceptionReporter.logAndClearWarnings( connection ); connectionProvider.closeConnection( connection ); } finally { connectionProvider.close(); } } connection = null; } } --- NEW FILE: SuppliedConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.util.JDBCExceptionReporter; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on an explicitly supplied * connection. 
* * @author Steve Ebersole */ class SuppliedConnectionHelper implements ConnectionHelper { private Connection connection; private boolean toggleAutoCommit; public SuppliedConnectionHelper(Connection connection) { this.connection = connection; } public void prepare(boolean needsAutoCommit) throws SQLException { toggleAutoCommit = needsAutoCommit && !connection.getAutoCommit(); if ( toggleAutoCommit ) { try { connection.commit(); } catch( Throwable ignore ) { // might happen with a managed connection } connection.setAutoCommit( true ); } } public Connection getConnection() { return connection; } public void release() throws SQLException { JDBCExceptionReporter.logAndClearWarnings( connection ); if ( toggleAutoCommit ) { connection.setAutoCommit( false ); } connection = null; } } --- NEW FILE: SuppliedConnectionProviderConnectionHelper.java --- package org.hibernate.tool.hbm2ddl; import org.hibernate.connection.ConnectionProvider; import org.hibernate.util.JDBCExceptionReporter; import java.sql.Connection; import java.sql.SQLException; /** * A {@link ConnectionHelper} implementation based on a provided * {@link ConnectionProvider}. Essentially, ensures that the connection * gets cleaned up, but that the provider itself remains usable since it * was externally provided to us. 
* * @author Steve Ebersole */ class SuppliedConnectionProviderConnectionHelper implements ConnectionHelper { private ConnectionProvider provider; private Connection connection; private boolean toggleAutoCommit; public SuppliedConnectionProviderConnectionHelper(ConnectionProvider provider) { this.provider = provider; } public void prepare(boolean needsAutoCommit) throws SQLException { connection = provider.getConnection(); toggleAutoCommit = needsAutoCommit && !connection.getAutoCommit(); if ( toggleAutoCommit ) { try { connection.commit(); } catch( Throwable ignore ) { // might happen with a managed connection } connection.setAutoCommit( true ); } } public Connection getConnection() throws SQLException { return connection; } public void release() throws SQLException { // we only release the connection if ( connection != null ) { JDBCExceptionReporter.logAndClearWarnings( connection ); if ( toggleAutoCommit ) { connection.setAutoCommit( false ); } provider.closeConnection( connection ); connection = null; } } } Index: SchemaExport.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaExport.java,v retrieving revision 1.23 retrieving revision 1.24 diff -u -d -r1.23 -r1.24 --- SchemaExport.java 30 Oct 2005 10:59:20 -0000 1.23 +++ SchemaExport.java 10 Feb 2006 03:48:38 -0000 1.24 @@ -14,10 +14,8 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; import java.util.Properties; -import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -27,12 +25,9 @@ import org.hibernate.cfg.Environment; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import 
org.hibernate.pretty.DDLFormatter; import org.hibernate.util.ConfigHelper; -import org.hibernate.util.JDBCExceptionReporter; import org.hibernate.util.PropertiesHelper; import org.hibernate.util.ReflectHelper; @@ -44,7 +39,7 @@ */ public class SchemaExport { - private static final Log log = LogFactory.getLog(SchemaExport.class); + private static final Log log = LogFactory.getLog( SchemaExport.class ); private ConnectionHelper connectionHelper; private String[] dropSQL; @@ -53,9 +48,9 @@ private String importFile = "/import.sql"; private Dialect dialect; private String delimiter; - private final List exceptions = new ArrayList(); - private boolean haltOnError = false; - private boolean format = true; + private final List exceptions = new ArrayList(); + private boolean haltOnError = false; + private boolean format = true; /** * Create a schema exporter for the given Configuration @@ -70,9 +65,9 @@ */ public SchemaExport(Configuration cfg, Settings settings) throws HibernateException { dialect = settings.getDialect(); - connectionHelper = new SuppliedConnectionProviderConnectionHelper( - settings.getConnectionProvider() - ); + connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); dropSQL = cfg.generateDropSchemaScript( dialect ); createSQL = cfg.generateSchemaCreationScript( dialect ); format = settings.isFormatSqlEnabled(); @@ -81,21 +76,21 @@ /** * Create a schema exporter for the given Configuration, with the given * database connection properties. 
- * + * * @deprecated properties may be specified via the Configuration object */ - public SchemaExport(Configuration cfg, Properties properties) - throws HibernateException { + public SchemaExport(Configuration cfg, Properties properties) + throws HibernateException { dialect = Dialect.getDialect( properties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); props.putAll( properties ); - connectionHelper = new ProviderConnectionHelper( props ); + connectionHelper = new ManagedProviderConnectionHelper( props ); dropSQL = cfg.generateDropSchemaScript( dialect ); createSQL = cfg.generateSchemaCreationScript( dialect ); - format = PropertiesHelper.getBoolean(Environment.FORMAT_SQL, props); + format = PropertiesHelper.getBoolean( Environment.FORMAT_SQL, props ); } public SchemaExport(Configuration cfg, Connection connection) { @@ -128,174 +123,188 @@ /** * Run the schema creation script. + * * @param script print the DDL to the console * @param export export the script to the database */ public void create(boolean script, boolean export) { - execute(script, export, false, false); + execute( script, export, false, false ); } /** * Run the drop schema script. + * * @param script print the DDL to the console * @param export export the script to the database */ public void drop(boolean script, boolean export) { - execute(script, export, true, false); + execute( script, export, true, false ); } - + private String format(String sql) { - return format ? - new DDLFormatter(sql).format() : - sql; + return format ? 
+ new DDLFormatter( sql ).format() : + sql; } public void execute(boolean script, boolean export, boolean justDrop, boolean justCreate) { - log.info("Running hbm2ddl schema export"); + log.info( "Running hbm2ddl schema export" ); Connection connection = null; Writer outputFileWriter = null; Reader importFileReader = null; Statement statement = null; - exceptions.clear(); + exceptions.clear(); try { - - try - { - InputStream stream = ConfigHelper.getResourceAsStream(importFile); - importFileReader = new InputStreamReader(stream); + + try { + InputStream stream = ConfigHelper.getResourceAsStream( importFile ); + importFileReader = new InputStreamReader( stream ); } - catch (HibernateException e) - { - log.debug("import file not found: " + importFile); + catch ( HibernateException e ) { + log.debug( "import file not found: " + importFile ); } - if (outputFile != null) { - log.info("writing generated schema to file: " + outputFile); - outputFileWriter = new FileWriter(outputFile); + if ( outputFile != null ) { + log.info( "writing generated schema to file: " + outputFile ); + outputFileWriter = new FileWriter( outputFile ); } - if (export) { - log.info("exporting generated schema to database"); + if ( export ) { + log.info( "exporting generated schema to database" ); + connectionHelper.prepare( true ); connection = connectionHelper.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit(true); - } statement = connection.createStatement(); } - - if (!justCreate) { + + if ( !justCreate ) { drop( script, export, outputFileWriter, statement ); } - if (!justDrop) { + if ( !justDrop ) { create( script, export, outputFileWriter, statement ); - if (export && importFileReader!=null) { - importScript(importFileReader, statement); + if ( export && importFileReader != null ) { + importScript( importFileReader, statement ); } } - log.info("schema export complete"); + log.info( "schema export complete" ); } - catch(Exception e) { - 
exceptions.add(e); - log.error("schema export unsuccessful", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "schema export unsuccessful", e ); } finally { try { - if (statement!=null) statement.close(); - if (connection!=null) connectionHelper.release(); + if ( statement != null ) { + statement.close(); + } + if ( connection != null ) { + connectionHelper.release(); + } } - catch(Exception e) { - exceptions.add(e); + catch ( Exception e ) { + exceptions.add( e ); log.error( "Could not close connection", e ); } try { - if (outputFileWriter!=null) outputFileWriter.close(); - if (importFileReader!=null) importFileReader.close(); + if ( outputFileWriter != null ) { + outputFileWriter.close(); + } + if ( importFileReader != null ) { + importFileReader.close(); + } } - catch (IOException ioe) { - exceptions.add(ioe); + catch ( IOException ioe ) { + exceptions.add( ioe ); log.error( "Error closing output file: " + outputFile, ioe ); } } } - - private void importScript(Reader importFileReader, Statement statement) + + private void importScript(Reader importFileReader, Statement statement) throws IOException { - log.info("Executing import script: " + importFile); - BufferedReader reader = new BufferedReader(importFileReader); - for ( String sql = reader.readLine(); sql!=null; sql = reader.readLine() ) { + log.info( "Executing import script: " + importFile ); + BufferedReader reader = new BufferedReader( importFileReader ); + for ( String sql = reader.readLine(); sql != null; sql = reader.readLine() ) { try { String trimmedSql = sql.trim(); - if(trimmedSql.length()==0 || - trimmedSql.startsWith("--") || - trimmedSql.startsWith("//") || - trimmedSql.startsWith("/*")) { + if ( trimmedSql.length() == 0 || + trimmedSql.startsWith( "--" ) || + trimmedSql.startsWith( "//" ) || + trimmedSql.startsWith( "/*" ) ) { continue; - } else { - if(trimmedSql.endsWith(";")) { - trimmedSql = trimmedSql.substring(0,trimmedSql.length()-1); + } + else { + if ( 
trimmedSql.endsWith( ";" ) ) { + trimmedSql = trimmedSql.substring( 0, trimmedSql.length() - 1 ); } - log.debug(trimmedSql); - statement.execute(trimmedSql); + log.debug( trimmedSql ); + statement.execute( trimmedSql ); } } - catch (SQLException e) { - throw new JDBCException("Error during import script execution", e); - } + catch ( SQLException e ) { + throw new JDBCException( "Error during import script execution", e ); + } } } - private void create(boolean script, boolean export, Writer fileOutput, Statement statement) + private void create(boolean script, boolean export, Writer fileOutput, Statement statement) throws IOException { - for (int j = 0; j < createSQL.length; j++) { + for ( int j = 0; j < createSQL.length; j++ ) { try { execute( script, export, fileOutput, statement, createSQL[j] ); } - catch (SQLException e) { - if (haltOnError) { - throw new JDBCException("Error during DDL export", e); + catch ( SQLException e ) { + if ( haltOnError ) { + throw new JDBCException( "Error during DDL export", e ); } - exceptions.add(e); + exceptions.add( e ); log.error( "Unsuccessful: " + createSQL[j] ); log.error( e.getMessage() ); } } } - private void drop(boolean script, boolean export, Writer fileOutput, Statement statement) + private void drop(boolean script, boolean export, Writer fileOutput, Statement statement) throws IOException { - for (int i = 0; i < dropSQL.length; i++) { + for ( int i = 0; i < dropSQL.length; i++ ) { try { execute( script, export, fileOutput, statement, dropSQL[i] ); } - catch (SQLException e) { - exceptions.add(e); + catch ( SQLException e ) { + exceptions.add( e ); log.debug( "Unsuccessful: " + dropSQL[i] ); log.debug( e.getMessage() ); } } } - private void execute(boolean script, boolean export, Writer fileOutput, Statement statement, final String sql) + private void execute(boolean script, boolean export, Writer fileOutput, Statement statement, final String sql) throws IOException, SQLException { String formatted = format( sql ); - if 
(delimiter != null) formatted += delimiter; - if (script) System.out.println(formatted); - log.debug(formatted); - if (outputFile != null) fileOutput.write( formatted + "\n" ); - if (export) statement.executeUpdate( sql ); + if ( delimiter != null ) { + formatted += delimiter; + } + if ( script ) { + System.out.println( formatted ); + } + log.debug( formatted ); + if ( outputFile != null ) { + fileOutput.write( formatted + "\n" ); + } + if ( export ) { + statement.executeUpdate( sql ); + } } public static void main(String[] args) { @@ -313,185 +322,100 @@ boolean format = false; String delim = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].equals("--quiet") ) { + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].equals( "--quiet" ) ) { script = false; } - else if( args[i].equals("--drop") ) { + else if ( args[i].equals( "--drop" ) ) { drop = true; } - else if( args[i].equals("--create") ) { + else if ( args[i].equals( "--create" ) ) { create = true; } - else if( args[i].equals("--haltonerror") ) { + else if ( args[i].equals( "--haltonerror" ) ) { halt = true; } - else if( args[i].equals("--text") ) { + else if ( args[i].equals( "--text" ) ) { export = false; } - else if( args[i].startsWith("--output=") ) { - outFile = args[i].substring(9); + else if ( args[i].startsWith( "--output=" ) ) { + outFile = args[i].substring( 9 ); } - else if( args[i].startsWith("--import=") ) { - importFile = args[i].substring(9); + else if ( args[i].startsWith( "--import=" ) ) { + importFile = args[i].substring( 9 ); } - else if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + else if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if( args[i].equals("--format") ) { + else if ( args[i].equals( "--format" ) ) { format = true; } - else if ( args[i].startsWith("--delimiter=") ) { - delim = args[i].substring(12); + else if ( 
args[i].startsWith( "--delimiter=" ) ) { + delim = args[i].substring( 12 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ) + ( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ) .newInstance() - ); + ); } } else { String filename = args[i]; if ( filename.endsWith( ".jar" ) ) { - cfg.addJar( new File(filename) ); + cfg.addJar( new File( filename ) ); } else { - cfg.addFile(filename); + cfg.addFile( filename ); } } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - SchemaExport se = new SchemaExport(cfg) - .setHaltOnError(halt) - .setOutputFile(outFile) - .setImportFile(importFile) - .setDelimiter(delim); - if (format) se.setFormat(true); - se.execute(script, export, drop, create); - + SchemaExport se = new SchemaExport( cfg ) + .setHaltOnError( halt ) + .setOutputFile( outFile ) + .setImportFile( importFile ) + .setDelimiter( delim ); + if ( format ) { + se.setFormat( true ); + } + se.execute( script, export, drop, create ); + } - catch(Exception e) { + catch ( Exception e ) { log.error( "Error creating schema ", e ); e.printStackTrace(); } } - /** - * Returns a List of all Exceptions which occured during the export. 
- * @return A List containig the Exceptions occured during the export - */ - public List getExceptions() { - return exceptions; - } - - private interface ConnectionHelper { - Connection getConnection() throws SQLException; - void release() throws SQLException; - } - - private class SuppliedConnectionHelper implements ConnectionHelper { - private Connection connection; - - public SuppliedConnectionHelper(Connection connection) { - this.connection = connection; - } - - public Connection getConnection() { - return connection; - } - - public void release() { - JDBCExceptionReporter.logAndClearWarnings(connection); - connection = null; - } + /** + * Returns a List of all Exceptions which occured during the export. + * + * @return A List containig the Exceptions occured during the export + */ + public List getExceptions() { + return exceptions; } - private class SuppliedConnectionProviderConnectionHelper implements ConnectionHelper { - private ConnectionProvider provider; - private Connection connection; - - public SuppliedConnectionProviderConnectionHelper(ConnectionProvider provider) { - this.provider = provider; - } - - public Connection getConnection() throws SQLException { - if (connection == null) { - connection = provider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit( true ); - } - } - return connection; - } - - public void release() throws SQLException { - if (connection != null) { - JDBCExceptionReporter.logAndClearWarnings(connection); - provider.closeConnection( connection ); - connection = null; - } - } - } - public SchemaExport setFormat(boolean format) { this.format = format; return this; } - private class ProviderConnectionHelper implements ConnectionHelper { - private Properties cfgProperties; - private ConnectionProvider connectionProvider; - private Connection connection; - - public ProviderConnectionHelper(Properties cfgProperties) { - this.cfgProperties = cfgProperties; - } - - public 
Connection getConnection() throws SQLException { - if ( connection == null ) { - connectionProvider = ConnectionProviderFactory.newConnectionProvider( cfgProperties ); - connection = connectionProvider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit( true ); - } - } - return connection; - } - - public void release() throws SQLException { - if ( connection!=null ) { - JDBCExceptionReporter.logAndClearWarnings(connection); - connectionProvider.closeConnection( connection ); - connectionProvider.close(); - } - connection = null; - } - } - public SchemaExport setHaltOnError(boolean haltOnError) { this.haltOnError = haltOnError; return this; } } - - - - - - - Index: SchemaUpdate.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaUpdate.java,v retrieving revision 1.5 retrieving revision 1.6 diff -u -d -r1.5 -r1.6 --- SchemaUpdate.java 11 Aug 2005 20:41:21 -0000 1.5 +++ SchemaUpdate.java 10 Feb 2006 03:48:38 -0000 1.6 @@ -15,8 +15,6 @@ import org.hibernate.cfg.Configuration; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import org.hibernate.util.ReflectHelper; @@ -28,11 +26,11 @@ */ public class SchemaUpdate { - private static final Log log = LogFactory.getLog(SchemaUpdate.class); - private ConnectionProvider connectionProvider; + private static final Log log = LogFactory.getLog( SchemaUpdate.class ); + private ConnectionHelper connectionHelper; private Configuration configuration; private Dialect dialect; - private List exceptions; + private List exceptions; public SchemaUpdate(Configuration cfg) throws HibernateException { this( cfg, cfg.getProperties() ); @@ -40,19 +38,21 @@ public SchemaUpdate(Configuration cfg, Properties 
connectionProperties) throws HibernateException { this.configuration = cfg; - dialect = Dialect.getDialect(connectionProperties); + dialect = Dialect.getDialect( connectionProperties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); - props.putAll(connectionProperties); - connectionProvider = ConnectionProviderFactory.newConnectionProvider(props); - exceptions = new ArrayList(); + props.putAll( connectionProperties ); + connectionHelper = new ManagedProviderConnectionHelper( props ); + exceptions = new ArrayList(); } public SchemaUpdate(Configuration cfg, Settings settings) throws HibernateException { this.configuration = cfg; dialect = settings.getDialect(); - connectionProvider = settings.getConnectionProvider(); - exceptions = new ArrayList(); + connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); + exceptions = new ArrayList(); } public static void main(String[] args) { @@ -64,42 +64,42 @@ boolean doUpdate = true; String propFile = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].equals("--quiet") ) { + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].equals( "--quiet" ) ) { script = false; } - else if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + else if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--text") ) { + else if ( args[i].startsWith( "--text" ) ) { doUpdate = false; } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ).newInstance() + ( NamingStrategy 
) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance() ); } } else { - cfg.addFile(args[i]); + cfg.addFile( args[i] ); } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - new SchemaUpdate(cfg).execute(script, doUpdate); + new SchemaUpdate( cfg ).execute( script, doUpdate ); } - catch (Exception e) { + catch ( Exception e ) { log.error( "Error running schema update", e ); e.printStackTrace(); } @@ -107,92 +107,85 @@ /** * Execute the schema updates + * * @param script print all DDL to the console */ public void execute(boolean script, boolean doUpdate) { - log.info("Running hbm2ddl schema update"); + log.info( "Running hbm2ddl schema update" ); - Connection connection=null; - Statement stmt=null; - boolean autoCommitWasEnabled = true; + Connection connection = null; + Statement stmt = null; - exceptions.clear(); + exceptions.clear(); try { DatabaseMetadata meta; try { - log.info("fetching database metadata"); - connection = connectionProvider.getConnection(); - if ( !connection.getAutoCommit() ) { - connection.commit(); - connection.setAutoCommit(true); - autoCommitWasEnabled = false; - } - meta = new DatabaseMetadata(connection, dialect); + log.info( "fetching database metadata" ); + connectionHelper.prepare( true ); + connection = connectionHelper.getConnection(); + meta = new DatabaseMetadata( connection, dialect ); stmt = connection.createStatement(); } - catch (SQLException sqle) { - exceptions.add(sqle); - log.error("could not get database metadata", sqle); + catch ( SQLException sqle ) { + exceptions.add( sqle ); + log.error( "could not get database metadata", sqle ); throw sqle; } - log.info("updating schema"); + log.info( "updating schema" ); - String[] createSQL = 
configuration.generateSchemaUpdateScript(dialect, meta); - for (int j = 0; j < createSQL.length; j++) { + String[] createSQL = configuration.generateSchemaUpdateScript( dialect, meta ); + for ( int j = 0; j < createSQL.length; j++ ) { final String sql = createSQL[j]; try { - if (script) System.out.println(sql); - if (doUpdate) { - log.debug(sql); - stmt.executeUpdate(sql); + if ( script ) { + System.out.println( sql ); + } + if ( doUpdate ) { + log.debug( sql ); + stmt.executeUpdate( sql ); } } - catch (SQLException e) { - exceptions.add(e); + catch ( SQLException e ) { + exceptions.add( e ); log.error( "Unsuccessful: " + sql ); log.error( e.getMessage() ); } } - log.info("schema update complete"); + log.info( "schema update complete" ); } - catch (Exception e) { - exceptions.add(e); - log.error("could not complete schema update", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "could not complete schema update", e ); } finally { try { - if (stmt!=null) stmt.close(); - if (!autoCommitWasEnabled) connection.setAutoCommit(false); - if (connection!=null) connection.close(); - if (connectionProvider!=null) connectionProvider.close(); + if ( stmt != null ) { + stmt.close(); + } + connectionHelper.release(); } - catch (Exception e) { - exceptions.add(e); - log.error("Error closing connection", e); + catch ( Exception e ) { + exceptions.add( e ); + log.error( "Error closing connection", e ); } } } - /** - * Returns a List of all Exceptions which occured during the export. - * @return A List containig the Exceptions occured during the export - */ - public List getExceptions() { - return exceptions; - } + /** + * Returns a List of all Exceptions which occured during the export. 
+ * + * @return A List containig the Exceptions occured during the export + */ + public List getExceptions() { + return exceptions; + } } - - - - - - Index: SchemaValidator.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/tool/hbm2ddl/SchemaValidator.java,v retrieving revision 1.3 retrieving revision 1.4 diff -u -d -r1.3 -r1.4 --- SchemaValidator.java 7 Oct 2005 08:35:44 -0000 1.3 +++ SchemaValidator.java 10 Feb 2006 03:48:38 -0000 1.4 @@ -12,8 +12,6 @@ import org.hibernate.cfg.Configuration; import org.hibernate.cfg.NamingStrategy; import org.hibernate.cfg.Settings; -import org.hibernate.connection.ConnectionProvider; -import org.hibernate.connection.ConnectionProviderFactory; import org.hibernate.dialect.Dialect; import org.hibernate.util.ReflectHelper; @@ -25,8 +23,8 @@ */ public class SchemaValidator { - private static final Log log = LogFactory.getLog(SchemaValidator.class); - private ConnectionProvider connectionProvider; + private static final Log log = LogFactory.getLog( SchemaValidator.class ); + private ConnectionHelper connectionHelper; private Configuration configuration; private Dialect dialect; @@ -36,17 +34,19 @@ public SchemaValidator(Configuration cfg, Properties connectionProperties) throws HibernateException { this.configuration = cfg; - dialect = Dialect.getDialect(connectionProperties); + dialect = Dialect.getDialect( connectionProperties ); Properties props = new Properties(); props.putAll( dialect.getDefaultProperties() ); - props.putAll(connectionProperties); - connectionProvider = ConnectionProviderFactory.newConnectionProvider(props); + props.putAll( connectionProperties ); + connectionHelper = new ManagedProviderConnectionHelper( props ); } public SchemaValidator(Configuration cfg, Settings settings) throws HibernateException { this.configuration = cfg; dialect = settings.getDialect(); - connectionProvider = settings.getConnectionProvider(); + 
connectionHelper = new SuppliedConnectionProviderConnectionHelper( + settings.getConnectionProvider() + ); } public static void main(String[] args) { @@ -55,87 +55,80 @@ String propFile = null; - for ( int i=0; i<args.length; i++ ) { - if( args[i].startsWith("--") ) { - if( args[i].startsWith("--properties=") ) { - propFile = args[i].substring(13); + for ( int i = 0; i < args.length; i++ ) { + if ( args[i].startsWith( "--" ) ) { + if ( args[i].startsWith( "--properties=" ) ) { + propFile = args[i].substring( 13 ); } - else if ( args[i].startsWith("--config=") ) { - cfg.configure( args[i].substring(9) ); + else if ( args[i].startsWith( "--config=" ) ) { + cfg.configure( args[i].substring( 9 ) ); } - else if ( args[i].startsWith("--naming=") ) { + else if ( args[i].startsWith( "--naming=" ) ) { cfg.setNamingStrategy( - (NamingStrategy) ReflectHelper.classForName( args[i].substring(9) ).newInstance() + ( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance() ); } } else { - cfg.addFile(args[i]); + cfg.addFile( args[i] ); } } - - if (propFile!=null) { + + if ( propFile != null ) { Properties props = new Properties(); props.putAll( cfg.getProperties() ); - props.load( new FileInputStream(propFile) ); - cfg.setProperties(props); + props.load( new FileInputStream( propFile ) ); + cfg.setProperties( props ); } - - new SchemaValidator(cfg).validate(); + + new SchemaValidator( cfg ).validate(); } - catch (Exception e) { + catch ( Exception e ) { log.error( "Error running schema update", e ); e.printStackTrace(); } } /** - * Execute the schema updates - * @param script print all DDL to the console + * Perform the validations. 
*/ public void validate() { - log.info("Running schema validator"); + log.info( "Running schema validator" ); - Connection connection=null; + Connection connection = null; try { DatabaseMetadata meta; try { - log.info("fetching database metadata"); - connection = connectionProvider.getConnection(); - meta = new DatabaseMetadata(connection, dialect, false); + log.info( "fetching database metadata" ); + connectionHelper.prepare( false ); + connection = connectionHelper.getConnection(); + meta = new DatabaseMetadata( connection, dialect, false ); } - catch (SQLException sqle) { - log.error("could not get database metadata", sqle); + catch ( SQLException sqle ) { + log.error( "could not get database metadata", sqle ); throw sqle; } - - configuration.validateSchema(dialect, meta); + + configuration.validateSchema( dialect, meta ); } - catch (SQLException e) { - log.error("could not complete schema validation", e); + catch ( SQLException e ) { + log.error( "could not complete schema validation", e ); } finally { try { - if (connection!=null) connection.close(); - if (connectionProvider!=null) connectionProvider.close(); + connectionHelper.release(); } - catch (Exception e) { - log.error("Error closing connection", e); + catch ( Exception e ) { + log.error( "Error closing connection", e ); } } } } - - - - - - |
From: <ste...@us...> - 2006-02-09 20:51:46
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8740/test/org/hibernate/test Modified Files: AllTests.java Log Message: started an actual CacheProvider unit test suite Index: AllTests.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/AllTests.java,v retrieving revision 1.88 retrieving revision 1.89 diff -u -d -r1.88 -r1.89 --- AllTests.java 3 Feb 2006 22:36:16 -0000 1.88 +++ AllTests.java 9 Feb 2006 20:51:39 -0000 1.89 @@ -11,7 +11,7 @@ import org.hibernate.test.batchfetch.BatchFetchTest; import org.hibernate.test.bidi.AuctionTest; import org.hibernate.test.bidi.AuctionTest2; -import org.hibernate.test.cache.SecondLevelCacheTest; +import org.hibernate.test.cache.CacheSuite; import org.hibernate.test.cascade.RefreshTest; import org.hibernate.test.cid.CompositeIdTest; import org.hibernate.test.collection.CollectionTest; @@ -270,7 +270,7 @@ suite.addTest( ASTIteratorTest.suite() ); suite.addTest( HQLSuite.suite() ); suite.addTest( ASTUtilTest.suite() ); - suite.addTest( SecondLevelCacheTest.suite() ); + suite.addTest( CacheSuite.suite() ); suite.addTest( QueryCacheTest.suite() ); suite.addTest( CompositeUserTypeTest.suite() ); suite.addTest( TypeParameterTest.suite() ); |
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8585/test/org/hibernate/test/cache Modified Files: Item.hbm.xml Added Files: BaseCacheProviderTestCase.java CacheSuite.java VersionedItem.java Removed Files: SecondLevelCacheTest.java Log Message: started an actual CacheProvider unit test suite --- NEW FILE: BaseCacheProviderTestCase.java --- package org.hibernate.test.cache; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Environment; import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.stat.SecondLevelCacheStatistics; import org.hibernate.stat.Statistics; import org.hibernate.test.TestCase; import org.hibernate.test.tm.DummyConnectionProvider; import org.hibernate.test.tm.DummyTransactionManagerLookup; import org.hibernate.transaction.JDBCTransactionFactory; import java.util.Map; /** * Common requirement testing for each {@link org.hibernate.cache.CacheProvider} impl. * * @author Steve Ebersole */ public abstract class BaseCacheProviderTestCase extends TestCase { // note that a lot of the fucntionality here is intended to be used // in creating specific tests for each CacheProvider that would extend // from a base test case (this) for common requirement testing... 
public BaseCacheProviderTestCase(String x) { super( x ); } protected String[] getMappings() { return new String[] { "cache/Item.hbm.xml" }; } protected void configure(Configuration cfg) { super.configure( cfg ); cfg.setProperty( Environment.CACHE_REGION_PREFIX, "" ); cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" ); cfg.setProperty( Environment.GENERATE_STATISTICS, "true" ); cfg.setProperty( Environment.USE_STRUCTURED_CACHE, "true" ); cfg.setProperty( Environment.CACHE_PROVIDER, getCacheProvider().getName() ); if ( getConfigResourceKey() != null ) { cfg.setProperty( getConfigResourceKey(), getConfigResourceLocation() ); } if ( useTransactionManager() ) { cfg.setProperty( Environment.CONNECTION_PROVIDER, DummyConnectionProvider.class.getName() ); cfg.setProperty( Environment.TRANSACTION_MANAGER_STRATEGY, DummyTransactionManagerLookup.class.getName() ); } else { cfg.setProperty( Environment.TRANSACTION_MANAGER_STRATEGY, JDBCTransactionFactory.class.getName() ); } } /** * The cache provider to be tested. * * @return The cache provider. */ protected abstract Class getCacheProvider(); /** * For provider-specific configuration, the name of the property key the * provider expects. * * @return The provider-specific config key. */ protected abstract String getConfigResourceKey(); /** * For provider-specific configuration, the resource location of that * config resource. * * @return The config resource location. */ protected abstract String getConfigResourceLocation(); /** * Should we use a transaction manager for transaction management. * * @return True if we should use a RM; false otherwise. 
*/ protected abstract boolean useTransactionManager(); public void testQueryCacheInvalidation() { Session s = openSession(); Transaction t = s.beginTransaction(); Item i = new Item(); i.setName("widget"); i.setDescription("A really top-quality, full-featured widget."); s.persist(i); t.commit(); s.close(); SecondLevelCacheStatistics slcs = s.getSessionFactory().getStatistics() .getSecondLevelCacheStatistics( Item.class.getName() ); assertEquals( slcs.getPutCount(), 1 ); assertEquals( slcs.getElementCountInMemory(), 1 ); assertEquals( slcs.getEntries().size(), 1 ); s = openSession(); t = s.beginTransaction(); i = (Item) s.get( Item.class, i.getId() ); assertEquals( slcs.getHitCount(), 1 ); assertEquals( slcs.getMissCount(), 0 ); i.setDescription("A bog standard item"); t.commit(); s.close(); assertEquals( slcs.getPutCount(), 2 ); Map map = (Map) slcs.getEntries().get( i.getId() ); assertTrue( map.get("description").equals("A bog standard item") ); assertTrue( map.get("name").equals("widget") ); // cleanup s = openSession(); t = s.beginTransaction(); s.delete( i ); t.commit(); s.close(); } public void testEmptySecondLevelCacheEntry() throws Exception { getSessions().evictEntity( Item.class.getName() ); Statistics stats = getSessions().getStatistics(); stats.clear(); SecondLevelCacheStatistics statistics = stats.getSecondLevelCacheStatistics( Item.class.getName() ); Map cacheEntries = statistics.getEntries(); assertEquals( 0, cacheEntries.size() ); } public void testStaleWritesLeaveCacheConsistent() { Session s = openSession(); Transaction txn = s.beginTransaction(); VersionedItem item = new VersionedItem(); item.setName( "steve" ); item.setDescription( "steve's item" ); s.save( item ); txn.commit(); s.close(); Long initialVersion = item.getVersion(); // manually revert the version property item.setVersion( new Long( item.getVersion().longValue() - 1 ) ); try { s = openSession(); txn = s.beginTransaction(); s.update( item ); txn.commit(); s.close(); fail( "expected 
stale write to fail" ); } catch( Throwable expected ) { // expected behavior here if ( txn != null ) { try { txn.rollback(); } catch( Throwable ignore ) { } } } finally { if ( s != null && s.isOpen() ) { try { s.close(); } catch( Throwable ignore ) { } } } // check the version value in the cache... SecondLevelCacheStatistics slcs = sfi().getStatistics() .getSecondLevelCacheStatistics( VersionedItem.class.getName() ); Map map = ( Map ) slcs.getEntries().get( item.getId() ); Long cachedVersionValue = ( Long ) map.get( "_version" ); assertEquals( initialVersion.longValue(), cachedVersionValue.longValue() ); // cleanup s = openSession(); txn = s.beginTransaction(); item = ( VersionedItem ) s.load( VersionedItem.class, item.getId() ); s.delete( item ); txn.commit(); s.close(); } protected SessionFactoryImplementor sfi() { return ( SessionFactoryImplementor ) getSessions(); } } --- NEW FILE: CacheSuite.java --- package org.hibernate.test.cache; import junit.framework.Test; import junit.framework.TestSuite; import org.hibernate.test.cache.treecache.optimistic.OptimisticTreeCacheTest; import org.hibernate.test.cache.treecache.pessimistic.TreeCacheTest; /** * @author Steve Ebersole */ public class CacheSuite { public static Test suite() { TestSuite suite = new TestSuite( "CacheProvider tests"); suite.addTest( OptimisticTreeCacheTest.suite() ); suite.addTest( TreeCacheTest.suite() ); return suite; } } --- NEW FILE: VersionedItem.java --- package org.hibernate.test.cache; /** * @author Steve Ebersole */ public class VersionedItem extends Item { private Long version; public Long getVersion() { return version; } public void setVersion(Long version) { this.version = version; } } Index: Item.hbm.xml =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/Item.hbm.xml,v retrieving revision 1.2 retrieving revision 1.3 diff -u -d -r1.2 -r1.3 --- Item.hbm.xml 16 May 2005 05:46:46 -0000 1.2 +++ 
Item.hbm.xml 9 Feb 2006 20:51:13 -0000 1.3 @@ -14,4 +14,13 @@ <property name="description" not-null="true"/> </class> + <class name="VersionedItem" table="VersionedItems"> + <id name="id"> + <generator class="increment"/> + </id> + <version name="version" type="long"/> + <property name="name" not-null="true"/> + <property name="description" not-null="true"/> + </class> + </hibernate-mapping> --- SecondLevelCacheTest.java DELETED --- |
From: <ste...@us...> - 2006-02-09 20:51:22
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/optimistic In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8585/test/org/hibernate/test/cache/treecache/optimistic Added Files: OptimisticTreeCacheTest.java treecache.xml Log Message: started an actual CacheProvider unit test suite --- NEW FILE: OptimisticTreeCacheTest.java --- package org.hibernate.test.cache.treecache.optimistic; import org.hibernate.test.cache.BaseCacheProviderTestCase; import org.hibernate.test.tm.DummyTransactionManager; import org.hibernate.cache.OptimisticTreeCacheProvider; import org.hibernate.engine.SessionFactoryImplementor; import org.jboss.cache.TreeCache; import org.jboss.cache.Fqn; import org.jboss.cache.optimistic.DataVersion; import org.jboss.cache.config.Option; import junit.framework.Test; import junit.framework.TestSuite; /** * @author Steve Ebersole */ public class OptimisticTreeCacheTest extends BaseCacheProviderTestCase { // note that a lot of the fucntionality here is intended to be used // in creating specific tests for each CacheProvider that would extend // from a base test case (this) for common requirement testing... 
public OptimisticTreeCacheTest(String x) { super( x ); } public static Test suite() { return new TestSuite( OptimisticTreeCacheTest.class ); } public String getCacheConcurrencyStrategy() { return "transactional"; } protected Class getCacheProvider() { return OptimisticTreeCacheProvider.class; } protected String getConfigResourceKey() { return OptimisticTreeCacheProvider.CONFIG_RESOURCE; } protected String getConfigResourceLocation() { return "org/hibernate/test/cache/treecache/optimistic/treecache.xml"; } protected boolean useTransactionManager() { return true; } public void testCacheLevelStaleWritesFail() throws Throwable { Fqn fqn = new Fqn( "whatever" ); TreeCache treeCache = ( ( OptimisticTreeCacheProvider ) sfi().getSettings().getCacheProvider() ).getUnderlyingCache(); Long long1 = new Long(1); Long long2 = new Long(2); try { System.out.println( "****************************************************************" ); DummyTransactionManager.INSTANCE.begin(); treeCache.put( fqn, "ITEM", long1, ManualDataVersion.gen( 1 ) ); DummyTransactionManager.INSTANCE.commit(); System.out.println( "****************************************************************" ); DummyTransactionManager.INSTANCE.begin(); treeCache.put( fqn, "ITEM", long2, ManualDataVersion.gen( 2 ) ); DummyTransactionManager.INSTANCE.commit(); try { System.out.println( "****************************************************************" ); DummyTransactionManager.INSTANCE.begin(); treeCache.put( fqn, "ITEM", long1, ManualDataVersion.gen( 1 ) ); DummyTransactionManager.INSTANCE.commit(); fail( "stale write allowed" ); } catch( Throwable ignore ) { // expected behavior DummyTransactionManager.INSTANCE.rollback(); } Long current = ( Long ) treeCache.get( fqn, "ITEM" ); assertEquals( "unexpected current value", 2, current.longValue() ); } finally { try { treeCache.remove( fqn, "ITEM" ); } catch( Throwable ignore ) { } } } private static class ManualDataVersion implements DataVersion { private final int version; 
public ManualDataVersion(int version) { this.version = version; } public boolean newerThan(DataVersion dataVersion) { return this.version > ( ( ManualDataVersion ) dataVersion ).version; } public static Option gen(int version) { ManualDataVersion mdv = new ManualDataVersion( version ); Option option = new Option(); option.setDataVersion( mdv ); return option; } } public SessionFactoryImplementor sfi() { return ( SessionFactoryImplementor ) getSessions(); } } --- NEW FILE: treecache.xml --- <?xml version="1.0" encoding="UTF-8"?> <!-- ===================================================================== --> <!-- --> <!-- Sample TreeCache Service Configuration --> <!-- Recommended for use as Hibernate's 2nd Level Cache --> <!-- For use with JBossCache >= 1.3.0 ONLY!!! --> <!-- --> <!-- ===================================================================== --> <server> <classpath codebase="./lib" archives="jboss-cache.jar, jgroups.jar"/> <!-- ==================================================================== --> <!-- Defines TreeCache configuration --> <!-- ==================================================================== --> <mbean code="org.jboss.cache.TreeCache" name="jboss.cache:service=TreeCache"> <depends>jboss:service=Naming</depends> <depends>jboss:service=TransactionManager</depends> <!-- Configure the TransactionManager : no matter since Hibernate will plug in an "adapter" to its own TransactionManagerLookup strategy here --> <attribute name="TransactionManagerLookupClass">org.jboss.cache.GenericTransactionManagerLookup</attribute> <!-- Node locking scheme: OPTIMISTIC PESSIMISTIC (default) --> <attribute name="NodeLockingScheme">OPTIMISTIC</attribute> <!-- Note that this attribute is IGNORED if your NodeLockingScheme above is OPTIMISTIC. 
Isolation level : SERIALIZABLE REPEATABLE_READ (default) READ_COMMITTED READ_UNCOMMITTED NONE --> <attribute name="IsolationLevel">REPEATABLE_READ</attribute> <!-- Valid modes are LOCAL REPL_ASYNC REPL_SYNC INVALIDATION_ASYNC INVALIDATION_SYNC INVALIDATION_ASYNC is highly recommended as the mode for use with clustered second-level caches. --> <attribute name="CacheMode">LOCAL</attribute> <!-- Just used for async repl: use a replication queue --> <attribute name="UseReplQueue">false</attribute> <!-- Replication interval for replication queue (in ms) --> <attribute name="ReplQueueInterval">0</attribute> <!-- Max number of elements which trigger replication --> <attribute name="ReplQueueMaxElements">0</attribute> <!-- Name of cluster. Needs to be the same for all clusters, in order to find each other --> <attribute name="ClusterName">TreeCache-Cluster</attribute> <!-- JGroups protocol stack properties. Can also be a URL, e.g. file:/home/bela/default.xml <attribute name="ClusterProperties"></attribute> --> <attribute name="ClusterConfig"> <config> <!-- UDP: if you have a multihomed machine, set the bind_addr attribute to the appropriate NIC IP address --> <!-- UDP: On Windows machines, because of the media sense feature being broken with multicast (even after disabling media sense) set the loopback attribute to true --> <UDP mcast_addr="228.1.2.3" mcast_port="48866" ip_ttl="64" ip_mcast="true" mcast_send_buf_size="150000" mcast_recv_buf_size="80000" ucast_send_buf_size="150000" ucast_recv_buf_size="80000" loopback="false"/> <PING timeout="2000" num_initial_members="3" up_thread="false" down_thread="false"/> <MERGE2 min_interval="10000" max_interval="20000"/> <!-- <FD shun="true" up_thread="true" down_thread="true" />--> <FD_SOCK/> <VERIFY_SUSPECT timeout="1500" up_thread="false" down_thread="false"/> <pbcast.NAKACK gc_lag="50" retransmit_timeout="600,1200,2400,4800" max_xmit_size="8192" up_thread="false" down_thread="false"/> <UNICAST timeout="600,1200,2400" 
window_size="100" min_threshold="10" down_thread="false"/> <pbcast.STABLE desired_avg_gossip="20000" up_thread="false" down_thread="false"/> <FRAG frag_size="8192" down_thread="false" up_thread="false"/> <pbcast.GMS join_timeout="5000" join_retry_timeout="2000" shun="true" print_local_addr="true"/> <pbcast.STATE_TRANSFER up_thread="true" down_thread="true"/> </config> </attribute> <!-- Whether or not to fetch state on joining a cluster NOTE this used to be called FetchStateOnStartup and has been renamed to be more descriptive. --> <attribute name="FetchInMemoryState">false</attribute> <!-- Number of milliseconds to wait until all responses for a synchronous call have been received. --> <attribute name="SyncReplTimeout">20000</attribute> <!-- Max number of milliseconds to wait for a lock acquisition --> <attribute name="LockAcquisitionTimeout">15000</attribute> <!-- Name of the eviction policy class. --> <attribute name="EvictionPolicyClass"></attribute> <!-- Indicate whether to use marshalling or not. Set this to true if you are running under a scoped class loader, e.g., inside an application server. Default is "false". --> <attribute name="UseMarshalling">false</attribute> </mbean> </server> |
From: <ste...@us...> - 2006-02-09 20:51:21
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/pessimistic In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8585/test/org/hibernate/test/cache/treecache/pessimistic Added Files: TreeCacheTest.java treecache.xml Log Message: started an actual CacheProvider unit test suite --- NEW FILE: TreeCacheTest.java --- package org.hibernate.test.cache.treecache.pessimistic; import org.hibernate.test.cache.BaseCacheProviderTestCase; import org.hibernate.cache.TreeCacheProvider; import junit.framework.Test; import junit.framework.TestSuite; /** * @author Steve Ebersole */ public class TreeCacheTest extends BaseCacheProviderTestCase { // note that a lot of the fucntionality here is intended to be used // in creating specific tests for each CacheProvider that would extend // from a base test case (this) for common requirement testing... public TreeCacheTest(String x) { super( x ); } public static Test suite() { return new TestSuite( TreeCacheTest.class ); } public String getCacheConcurrencyStrategy() { return "transactional"; } protected Class getCacheProvider() { return TreeCacheProvider.class; } protected String getConfigResourceKey() { return TreeCacheProvider.CONFIG_RESOURCE; } protected String getConfigResourceLocation() { return "org/hibernate/test/cache/treecache/pessimistic/treecache.xml"; } protected boolean useTransactionManager() { return true; } } --- NEW FILE: treecache.xml --- <?xml version="1.0" encoding="UTF-8"?> <!-- ===================================================================== --> <!-- --> <!-- Sample TreeCache Service Configuration --> <!-- --> <!-- ===================================================================== --> <server> <classpath codebase="./lib" archives="jboss-cache.jar, jgroups.jar"/> <!-- ==================================================================== --> <!-- Defines TreeCache configuration --> <!-- ==================================================================== --> <mbean 
code="org.jboss.cache.TreeCache" name="jboss.cache:service=TreeCache"> <depends>jboss:service=Naming</depends> <depends>jboss:service=TransactionManager</depends> <!-- TransactionManager configuration not required for Hibernate! --> <!-- Node isolation level : SERIALIZABLE REPEATABLE_READ (default) READ_COMMITTED READ_UNCOMMITTED NONE --> <attribute name="IsolationLevel">REPEATABLE_READ</attribute> <!-- Valid modes are LOCAL REPL_ASYNC REPL_SYNC --> <attribute name="CacheMode">LOCAL</attribute> <!-- Name of cluster. Needs to be the same for all clusters, in order to find each other --> <attribute name="ClusterName">TreeCache-Cluster</attribute> <!-- JGroups protocol stack properties. Can also be a URL, e.g. file:/home/bela/default.xml <attribute name="ClusterProperties"></attribute> --> <attribute name="ClusterConfig"> <config> <!-- UDP: if you have a multihomed machine, set the bind_addr attribute to the appropriate NIC IP address --> <!-- UDP: On Windows machines, because of the media sense feature being broken with multicast (even after disabling media sense) set the loopback attribute to true --> <UDP mcast_addr="228.1.2.3" mcast_port="45566" ip_ttl="64" ip_mcast="true" mcast_send_buf_size="150000" mcast_recv_buf_size="80000" ucast_send_buf_size="150000" ucast_recv_buf_size="80000" loopback="false"/> <PING timeout="2000" num_initial_members="3" up_thread="false" down_thread="false"/> <MERGE2 min_interval="10000" max_interval="20000"/> <FD shun="true" up_thread="true" down_thread="true"/> <VERIFY_SUSPECT timeout="1500" up_thread="false" down_thread="false"/> <pbcast.NAKACK gc_lag="50" retransmit_timeout="600,1200,2400,4800" up_thread="false" down_thread="false"/> <pbcast.STABLE desired_avg_gossip="20000" up_thread="false" down_thread="false"/> <UNICAST timeout="600,1200,2400" window_size="100" min_threshold="10" down_thread="false"/> <FRAG frag_size="8192" down_thread="false" up_thread="false"/> <pbcast.GMS join_timeout="5000" join_retry_timeout="2000" 
shun="true" print_local_addr="true"/> <pbcast.STATE_TRANSFER up_thread="false" down_thread="false"/> </config> </attribute> <!-- Max number of entries in the cache. If this is exceeded, the eviction policy will kick some entries out in order to make more room --> <attribute name="MaxCapacity">20000</attribute> <!-- The max amount of time (in milliseconds) we wait until the initial state (ie. the contents of the cache) are retrieved from existing members in a clustered environment --> <attribute name="InitialStateRetrievalTimeout">20000</attribute> <!-- Number of milliseconds to wait until all responses for a synchronous call have been received. --> <attribute name="SyncReplTimeout">10000</attribute> <!-- Max number of milliseconds to wait for a lock acquisition --> <attribute name="LockAcquisitionTimeout">15000</attribute> <!-- Max number of milliseconds we hold a lock (not currently implemented) --> <attribute name="LockLeaseTimeout">60000</attribute> <!-- Name of the eviction policy class. Not supported now. --> <attribute name="EvictionPolicyClass"></attribute> </mbean> </server> |
From: <ste...@us...> - 2006-02-09 20:49:53
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/optimistic In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7814/test/org/hibernate/test/cache/treecache/optimistic Log Message: Directory /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/optimistic added to the repository |
From: <ste...@us...> - 2006-02-09 20:49:53
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/pessimistic In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7814/test/org/hibernate/test/cache/treecache/pessimistic Log Message: Directory /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache/pessimistic added to the repository |
From: <ste...@us...> - 2006-02-09 20:49:48
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7799/test/org/hibernate/test/cache/treecache Log Message: Directory /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/cache/treecache added to the repository |
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7349/src/org/hibernate/cache Modified Files: CacheConcurrencyStrategy.java NonstrictReadWriteCache.java ReadOnlyCache.java ReadWriteCache.java TransactionalCache.java TreeCacheProvider.java Added Files: OptimisticCache.java OptimisticCacheSource.java OptimisticTreeCache.java OptimisticTreeCacheProvider.java Log Message: HHH-1457 : JBossCache 1.3.0 optimistic locking support --- NEW FILE: OptimisticCache.java --- package org.hibernate.cache; /** * A contract for transactional cache implementations which support * optimistic locking of items within the cache. * <p/> * The optimisitic locking capabilities are only utilized for * the entity cache regions. * <p/> * Unlike the methods on the {@link Cache} interface, all the methods * here will only ever be called from access scenarios where versioned * data is actually a possiblity (i.e., entity data). Be sure to consult * with {@link OptimisticCacheSource#isVersioned()} to determine whether * versioning is actually in effect. * * @author Steve Ebersole */ public interface OptimisticCache extends Cache { /** * Indicates the "source" of the cached data. Currently this will * only ever represent an {@link org.hibernate.persister.entity.EntityPersister}. * <p/> * Made available to the cache so that it can access certain information * about versioning strategy. * * @param source The source. */ public void setSource(OptimisticCacheSource source); /** * Called during {@link CacheConcurrencyStrategy#insert} processing for * transactional strategies. Indicates we have just performed an insert * into the DB and now need to cache that entity's data. * * @param key The cache key. * @param value The data to be cached. * @param currentVersion The entity's version; or null if not versioned. 
*/ public void writeInsert(Object key, Object value, Object currentVersion); /** * Called during {@link CacheConcurrencyStrategy#update} processing for * transactional strategies. Indicates we have just performed an update * against the DB and now need to cache the updated state. * * @param key The cache key. * @param value The data to be cached. * @param currentVersion The entity's current version * @param previousVersion The entity's previous version (before the update); * or null if not versioned. */ public void writeUpdate(Object key, Object value, Object currentVersion, Object previousVersion); /** * Called during {@link CacheConcurrencyStrategy#put} processing for * transactional strategies. Indicates we have just loaded an entity's * state from the database and need it cached. * * @param key The cache key. * @param value The data to be cached. * @param currentVersion The entity's version; or null if not versioned. */ public void writeLoad(Object key, Object value, Object currentVersion); } --- NEW FILE: OptimisticCacheSource.java --- package org.hibernate.cache; import java.util.Comparator; /** * Contract for sources of optimistically lockable data sent to the second level * cache. * <p/> * Note currently {@link org.hibernate.persister.entity.EntityPersister}s are * the only viable source. * * @author Steve Ebersole */ public interface OptimisticCacheSource { /** * Does this source represent versioned (i.e., and thus optimistically * lockable) data? * * @return True if this source represents versioned data; false otherwise. */ public boolean isVersioned(); /** * Get the comparator used to compare two different version values together. * * @return An appropriate comparator. 
*/ public Comparator getVersionComparator(); } --- NEW FILE: OptimisticTreeCache.java --- //$Id: OptimisticTreeCache.java,v 1.1 2006/02/09 20:48:42 steveebersole Exp $ package org.hibernate.cache; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.Comparator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jboss.cache.Fqn; import org.jboss.cache.optimistic.DataVersion; import org.jboss.cache.config.Option; import org.jboss.cache.lock.TimeoutException; /** * Represents a particular region within the given JBossCache TreeCache * utilizing TreeCache's optimistic locking capabilities. * * @see OptimisticTreeCacheProvider for more details * * @author Steve Ebersole */ public class OptimisticTreeCache implements OptimisticCache { // todo : eventually merge this with TreeCache and just add optional opt-lock support there. private static final Log log = LogFactory.getLog( OptimisticTreeCache.class); private static final String ITEM = "item"; private org.jboss.cache.TreeCache cache; private final String regionName; private final String userRegionName; private OptimisticCacheSource source; public OptimisticTreeCache(org.jboss.cache.TreeCache cache, String regionName) throws CacheException { this.cache = cache; userRegionName = regionName; this.regionName = regionName.replace('.', '/'); } // OptimisticCache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ public void setSource(OptimisticCacheSource source) { this.source = source; } public void writeInsert(Object key, Object value, Object currentVersion) { writeUpdate( key, value, currentVersion, null ); } public void writeUpdate(Object key, Object value, Object currentVersion, Object previousVersion) { try { Option option = null; if ( source != null ) { if ( source.isVersioned() ) { option = new Option(); option.setDataVersion( new DataVersionAdapter( currentVersion, previousVersion, 
source.getVersionComparator() ) ); } } cache.put( new Fqn( new Object[] { regionName, key } ), ITEM, value, option ); } catch (Exception e) { throw new CacheException(e); } } public void writeLoad(Object key, Object value, Object currentVersion) { try { Option option = new Option(); option.setFailSilently( true ); cache.remove( new Fqn( new Object[] { regionName, key } ), "ITEM", option ); if ( source != null ) { if ( source.isVersioned() ) { option.setDataVersion( new DataVersionAdapter( currentVersion, null, source.getVersionComparator() ) ); } } cache.put( new Fqn( new Object[] { regionName, key } ), ITEM, value, option ); } catch (Exception e) { throw new CacheException(e); } } // Cache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ public Object get(Object key) throws CacheException { try { Option option = new Option(); option.setFailSilently( true ); return cache.get( new Fqn( new Object[] { regionName, key } ), ITEM, option ); } catch (Exception e) { throw new CacheException(e); } } public Object read(Object key) throws CacheException { try { return cache.get( new Fqn( new Object[] { regionName, key } ), ITEM ); } catch (Exception e) { throw new CacheException(e); } } public void update(Object key, Object value) throws CacheException { try { cache.put( new Fqn( new Object[] { regionName, key } ), ITEM, value ); } catch (Exception e) { throw new CacheException(e); } } public void put(Object key, Object value) throws CacheException { try { // do the put outside the scope of the JTA txn Option option = new Option(); option.setFailSilently( true ); cache.put( new Fqn( new Object[] { regionName, key } ), ITEM, value, option ); } catch (TimeoutException te) { //ignore! 
log.debug("ignoring write lock acquisition failure"); } catch (Exception e) { throw new CacheException(e); } } public void remove(Object key) throws CacheException { try { cache.remove( new Fqn( new Object[] { regionName, key } ) ); } catch (Exception e) { throw new CacheException(e); } } public void clear() throws CacheException { try { cache.remove( new Fqn(regionName) ); } catch (Exception e) { throw new CacheException(e); } } public void destroy() throws CacheException { clear(); } public void lock(Object key) throws CacheException { throw new UnsupportedOperationException("TreeCache is a fully transactional cache: " + regionName); } public void unlock(Object key) throws CacheException { throw new UnsupportedOperationException("TreeCache is a fully transactional cache: " + regionName); } public long nextTimestamp() { return System.currentTimeMillis() / 100; } public int getTimeout() { return 600; //60 seconds } public String getRegionName() { return userRegionName; } public long getSizeInMemory() { return -1; } public long getElementCountInMemory() { try { Set children = cache.getChildrenNames( new Fqn(regionName) ); return children == null ? 
0 : children.size(); } catch (Exception e) { throw new CacheException(e); } } public long getElementCountOnDisk() { return 0; } public Map toMap() { try { Map result = new HashMap(); Set childrenNames = cache.getChildrenNames( new Fqn(regionName) ); if (childrenNames != null) { Iterator iter = childrenNames.iterator(); while ( iter.hasNext() ) { Object key = iter.next(); result.put( key, cache.get( new Fqn( new Object[] { regionName, key } ), ITEM ) ); } } return result; } catch (Exception e) { throw new CacheException(e); } } public String toString() { return "OptimisticTreeCache(" + userRegionName + ')'; } public static class DataVersionAdapter implements DataVersion { private final Object currentVersion; private final Object previousVersion; private final Comparator versionComparator; public DataVersionAdapter(Object currentVersion, Object previousVersion, Comparator versionComparator) { this.currentVersion = currentVersion; this.previousVersion = previousVersion; this.versionComparator = versionComparator; } public boolean newerThan(DataVersion dataVersion) { if ( previousVersion == null ) { log.warn( "Unexpected optimistic lock check on inserted data" ); } Object other = ( ( DataVersionAdapter ) dataVersion ).currentVersion; return versionComparator.compare( previousVersion, other ) > 1; } } } --- NEW FILE: OptimisticTreeCacheProvider.java --- //$Id: OptimisticTreeCacheProvider.java,v 1.1 2006/02/09 20:48:42 steveebersole Exp $ package org.hibernate.cache; import org.jboss.cache.PropertyConfigurator; import org.hibernate.transaction.TransactionManagerLookup; import org.hibernate.transaction.TransactionManagerLookupFactory; import org.hibernate.cfg.Environment; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.transaction.TransactionManager; import java.util.Properties; /** * Support for a standalone JBossCache TreeCache instance utilizing TreeCache's * optimistic locking capabilities. 
This capability was added in JBossCache * version 1.3.0; as such this provider will only work with that version or * higher. * <p/> * The TreeCache instance is configured via a local config resource. The * resource to be used for configuration can be controlled by specifying a value * for the {@link #CONFIG_RESOURCE} config property. * * @author Steve Ebersole */ public class OptimisticTreeCacheProvider implements CacheProvider { public static final String CONFIG_RESOURCE = "hibernate.cache.opt_tree_cache.config"; public static final String DEFAULT_CONFIG = "treecache.xml"; private static final String NODE_LOCKING_SCHEME = "OPTIMISTIC"; private static final Log log = LogFactory.getLog( OptimisticTreeCacheProvider.class ); private org.jboss.cache.TreeCache cache; /** * Construct and configure the Cache representation of a named cache region. * * @param regionName the name of the cache region * @param properties configuration settings * @return The Cache representation of the named cache region. * @throws CacheException * Indicates an error building the cache region. */ public Cache buildCache(String regionName, Properties properties) throws CacheException { return new OptimisticTreeCache( cache, regionName ); } public long nextTimestamp() { return System.currentTimeMillis() / 100; } /** * Prepare the underlying JBossCache TreeCache instance. * * @param properties All current config settings. * @throws CacheException * Indicates a problem preparing cache for use. 
*/ public void start(Properties properties) { String resource = properties.getProperty( CONFIG_RESOURCE ); if ( resource == null ) { resource = DEFAULT_CONFIG; } log.debug( "Configuring TreeCache from resource [" + resource + "]" ); try { cache = new org.jboss.cache.TreeCache(); PropertyConfigurator config = new PropertyConfigurator(); config.configure( cache, resource ); TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup( properties ); if ( transactionManagerLookup == null ) { throw new CacheException( "JBossCache only supports optimistic locking with a configured " + "TransactionManagerLookup (" + Environment.TRANSACTION_MANAGER_STRATEGY + ")" ); } cache.setTransactionManagerLookup( new TransactionManagerLookupAdaptor( transactionManagerLookup, properties ) ); if ( ! NODE_LOCKING_SCHEME.equalsIgnoreCase( cache.getNodeLockingScheme() ) ) { log.info( "Overriding node-locking-scheme to : " + NODE_LOCKING_SCHEME ); cache.setNodeLockingScheme( NODE_LOCKING_SCHEME ); } cache.start(); } catch ( Exception e ) { throw new CacheException( e ); } } public void stop() { if ( cache != null ) { cache.stop(); cache.destroy(); cache = null; } } public boolean isMinimalPutsEnabledByDefault() { return true; } static final class TransactionManagerLookupAdaptor implements org.jboss.cache.TransactionManagerLookup { private final TransactionManagerLookup tml; private final Properties props; TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) { this.tml = tml; this.props = props; } public TransactionManager getTransactionManager() throws Exception { return tml.getTransactionManager( props ); } } public org.jboss.cache.TreeCache getUnderlyingCache() { return cache; } } Index: CacheConcurrencyStrategy.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/CacheConcurrencyStrategy.java,v retrieving revision 1.7 
retrieving revision 1.8 diff -u -d -r1.7 -r1.8 --- CacheConcurrencyStrategy.java 12 Feb 2005 07:19:08 -0000 1.7 +++ CacheConcurrencyStrategy.java 9 Feb 2006 20:48:42 -0000 1.8 @@ -6,37 +6,53 @@ /** * Implementors manage transactional access to cached data. Transactions * pass in a timestamp indicating transaction start time. Two different - * implementation patterns are provided for. A transaction-aware cache - * implementation might be wrapped by a "synchronous" concurrency strategy, - * where updates to the cache are written to the cache inside the transaction. - * A non transaction-aware cache would be wrapped by an "asynchronous" + * implementation patterns are provided for.<ul> + * <li>A transaction-aware cache implementation might be wrapped by a + * "synchronous" concurrency strategy, where updates to the cache are written + * to the cache inside the transaction.</li> + * <li>A non transaction-aware cache would be wrapped by an "asynchronous" * concurrency strategy, where items are merely "soft locked" during the * transaction and then updated during the "after transaction completion" - * phase. The soft lock is not an actual lock on the database row - - * only upon the cached representation of the item.<br> - * <br> - * For the client, update lifecycles are: lock->evict->release, - * lock->update->afterUpdate, insert->afterInsert.<br> - * <br> + * phase; the soft lock is not an actual lock on the database row - + * only upon the cached representation of the item.</li> + * </ul> + * <p/> + * In terms of entity caches, the expected call sequences are: <ul> + * <li><b>DELETES</b> : {@link #lock} -> {@link #evict} -> {@link #release}</li> + * <li><b>UPDATES</b> : {@link #lock} -> {@link #update} -> {@link #afterUpdate}</li> + * <li><b>INSERTS</b> : {@link #insert} -> {@link #afterInsert}</li> + * </ul> + * <p/> + * In terms of collection caches, all modification actions actually just + * invalidate the entry(s). 
The call sequence here is: + * {@link #lock} -> {@link #evict} -> {@link #release} + * <p/> * Note that, for an asynchronous cache, cache invalidation must be a two * step process (lock->release, or lock-afterUpdate), since this is the only - * way to guarantee consistency with the database for a nontransaction cache + * way to guarantee consistency with the database for a nontransactional cache * implementation. For a synchronous cache, cache invalidation is a single * step process (evict, or update). Hence, this interface defines a three * step process, to cater for both models. + * <p/> + * Note that query result caching does not go through a concurrency strategy; they + * are managed directly against the underlying {@link Cache cache regions}. */ public interface CacheConcurrencyStrategy { - + /** - * Attempt to retrieve an object from the cache. + * Attempt to retrieve an object from the cache. Mainly used in attempting + * to resolve entities/collections from the second level cache. + * * @param key * @param txTimestamp a timestamp prior to the transaction start time * @return the cached object or <tt>null</tt> * @throws CacheException */ public Object get(Object key, long txTimestamp) throws CacheException; + /** * Attempt to cache an object, after loading from the database. + * * @param key * @param value * @param txTimestamp a timestamp prior to the transaction start time @@ -55,37 +71,39 @@ boolean minimalPut) throws CacheException; - /** * We are going to attempt to update/delete the keyed object. This - * method is used by "asynchronous" concurrency strategies. The - * returned object must be passed back to release(), to release the + * method is used by "asynchronous" concurrency strategies. + * <p/> + * The returned object must be passed back to release(), to release the * lock. Concurrency strategies which do not support client-visible * locks may silently return null. 
+ * * @param key * @param version * @throws CacheException */ public SoftLock lock(Object key, Object version) throws CacheException; - - + /** * Called after an item has become stale (before the transaction completes). * This method is used by "synchronous" concurrency strategies. */ public void evict(Object key) throws CacheException; + /** * Called after an item has been updated (before the transaction completes), * instead of calling evict(). * This method is used by "synchronous" concurrency strategies. */ - public boolean update(Object key, Object value) throws CacheException; + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) throws CacheException; + /** * Called after an item has been inserted (before the transaction completes), * instead of calling evict(). * This method is used by "synchronous" concurrency strategies. */ - public boolean insert(Object key, Object value) throws CacheException; + public boolean insert(Object key, Object value, Object currentVersion) throws CacheException; /** Index: NonstrictReadWriteCache.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/NonstrictReadWriteCache.java,v retrieving revision 1.8 retrieving revision 1.9 diff -u -d -r1.8 -r1.9 --- NonstrictReadWriteCache.java 12 Feb 2005 07:19:08 -0000 1.8 +++ NonstrictReadWriteCache.java 9 Feb 2006 20:48:42 -0000 1.9 @@ -109,7 +109,7 @@ /** * Invalidate the item */ - public boolean update(Object key, Object value) throws CacheException { + public boolean insert(Object key, Object value, Object currentVersion) { evict(key); return false; } @@ -117,7 +117,7 @@ /** * Do nothing. 
*/ - public boolean insert(Object key, Object value) throws CacheException { + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { return false; } Index: ReadOnlyCache.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/ReadOnlyCache.java,v retrieving revision 1.8 retrieving revision 1.9 diff -u -d -r1.8 -r1.9 --- ReadOnlyCache.java 16 Mar 2005 06:01:16 -0000 1.8 +++ ReadOnlyCache.java 9 Feb 2006 20:48:42 -0000 1.9 @@ -112,14 +112,14 @@ /** * Do nothing. */ - public boolean insert(Object key, Object value) throws CacheException { + public boolean insert(Object key, Object value, Object currentVersion) { return false; } /** * Unsupported! */ - public boolean update(Object key, Object value) throws CacheException { + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { log.error("Application attempted to edit read only item: " + key); throw new UnsupportedOperationException("Can't write to a readonly object"); } Index: ReadWriteCache.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/ReadWriteCache.java,v retrieving revision 1.9 retrieving revision 1.10 diff -u -d -r1.9 -r1.10 --- ReadWriteCache.java 30 Sep 2005 07:50:55 -0000 1.9 +++ ReadWriteCache.java 9 Feb 2006 20:48:42 -0000 1.10 @@ -311,14 +311,14 @@ /** * Do nothing. */ - public boolean insert(Object key, Object value) throws CacheException { + public boolean insert(Object key, Object value, Object currentVersion) { return false; } /** * Do nothing. 
*/ - public boolean update(Object key, Object value) throws CacheException { + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { return false; } Index: TransactionalCache.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/TransactionalCache.java,v retrieving revision 1.10 retrieving revision 1.11 diff -u -d -r1.10 -r1.11 --- TransactionalCache.java 21 Apr 2005 07:57:19 -0000 1.10 +++ TransactionalCache.java 9 Feb 2006 20:48:42 -0000 1.11 @@ -47,7 +47,13 @@ return false; } if ( log.isDebugEnabled() ) log.debug("caching: " + key); - cache.put(key, value); +// cache.put(key, value); + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeLoad( key, value, version ); + } + else { + cache.put( key, value ); + } return true; } @@ -66,15 +72,67 @@ //noop } + public boolean update(Object key, Object value, Object currentVersion, Object previousVersion) { + if ( log.isDebugEnabled() ) { + log.debug("updating: " + key); + } + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeUpdate( key, value, currentVersion, previousVersion ); + } + else { + cache.update( key, value ); + } + return true; + } + + public boolean insert(Object key, Object value, Object currentVersion) throws CacheException { + if ( log.isDebugEnabled() ) { + log.debug("inserting: " + key); + } + if ( cache instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache ).writeInsert( key, value, currentVersion ); + } + else { + cache.update( key, value ); + } + return true; + } + public boolean update(Object key, Object value) throws CacheException { - if ( log.isDebugEnabled() ) log.debug("updating: " + key); - cache.update(key, value); +// if ( log.isDebugEnabled() ) log.debug("updating: " + key); +// cache.update(key, value); +// return true; + if ( log.isDebugEnabled() ) { + log.debug("updating: " + key); + } + if ( cache 
instanceof OptimisticCache ) { + // todo : need to call writeUpdate() instead + // but that requires this method to take previous and current versions + // ( ( OptimisticCache ) cache ).writeUpdate( key, value, currentVersion, previousVersion ); + ( ( OptimisticCache ) cache ).update( key, value ); + } + else { + cache.update( key, value ); + } return true; } public boolean insert(Object key, Object value) throws CacheException { - if ( log.isDebugEnabled() ) log.debug("inserting: " + key); - cache.update(key, value); +// if ( log.isDebugEnabled() ) log.debug("inserting: " + key); +// cache.update(key, value); +// return true; + if ( log.isDebugEnabled() ) { + log.debug("inserting: " + key); + } + if ( cache instanceof OptimisticCache ) { + // todo : need to call writeInsert() instead + // but that requires this method to take current version + // ( ( OptimisticCache ) cache ).writeInsert( key, value, currentVersion ); + ( ( OptimisticCache ) cache ).update( key, value ); + } + else { + cache.update( key, value ); + } return true; } Index: TreeCacheProvider.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/cache/TreeCacheProvider.java,v retrieving revision 1.6 retrieving revision 1.7 diff -u -d -r1.6 -r1.7 --- TreeCacheProvider.java 16 Mar 2005 06:01:17 -0000 1.6 +++ TreeCacheProvider.java 9 Feb 2006 20:48:42 -0000 1.7 @@ -4,6 +4,8 @@ import org.jboss.cache.PropertyConfigurator; import org.hibernate.transaction.TransactionManagerLookup; import org.hibernate.transaction.TransactionManagerLookupFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import javax.transaction.TransactionManager; import java.util.Properties; @@ -16,6 +18,11 @@ */ public class TreeCacheProvider implements CacheProvider { + public static final String CONFIG_RESOURCE = "hibernate.cache.tree_cache.config"; + public static final String DEFAULT_CONFIG = "treecache.xml"; + + 
private static final Log log = LogFactory.getLog( TreeCacheProvider.class ); + private org.jboss.cache.TreeCache cache; private TransactionManager transactionManager; @@ -43,10 +50,15 @@ * @throws CacheException Indicates a problem preparing cache for use. */ public void start(Properties properties) { + String resource = properties.getProperty( CONFIG_RESOURCE ); + if ( resource == null ) { + resource = DEFAULT_CONFIG; + } + log.debug( "Configuring TreeCache from resource [" + resource + "]" ); try { cache = new org.jboss.cache.TreeCache(); PropertyConfigurator config = new PropertyConfigurator(); - config.configure(cache, "treecache.xml"); + config.configure( cache, resource ); TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup(properties); if (transactionManagerLookup!=null) { cache.setTransactionManagerLookup( new TransactionManagerLookupAdaptor(transactionManagerLookup, properties) ); @@ -83,4 +95,7 @@ } } + public org.jboss.cache.TreeCache getUnderlyingCache() { + return cache; + } } |
From: <ste...@us...> - 2006-02-09 20:48:51
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/persister/entity In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7349/src/org/hibernate/persister/entity Modified Files: AbstractEntityPersister.java EntityPersister.java Log Message: HHH-1457 : JBossCache 1.3.0 optimistic locking support Index: AbstractEntityPersister.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/persister/entity/AbstractEntityPersister.java,v retrieving revision 1.26 retrieving revision 1.27 diff -u -d -r1.26 -r1.27 --- AbstractEntityPersister.java 3 Feb 2006 22:08:24 -0000 1.26 +++ AbstractEntityPersister.java 9 Feb 2006 20:48:42 -0000 1.27 @@ -14,6 +14,7 @@ import java.util.Iterator; import java.util.Map; import java.util.Set; +import java.util.Comparator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -290,7 +291,9 @@ } String[] result = new String[getTableSpan()]; result[0] = sqlUpdateByRowIdString; - for ( int i = 1; i < getTableSpan(); i++ ) result[i] = sqlUpdateStrings[i]; + for ( int i = 1; i < getTableSpan(); i++ ) { + result[i] = sqlUpdateStrings[i]; + } return result; } @@ -300,7 +303,9 @@ } String[] result = new String[getTableSpan()]; result[0] = sqlLazyUpdateByRowIdString; - for ( int i = 1; i < getTableSpan(); i++ ) result[i] = sqlLazyUpdateStrings[i]; + for ( int i = 1; i < getTableSpan(); i++ ) { + result[i] = sqlLazyUpdateStrings[i]; + } return result; } @@ -420,7 +425,9 @@ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ int batch = persistentClass.getBatchSize(); - if (batch==-1) batch = factory.getSettings().getDefaultBatchFetchSize(); + if ( batch == -1 ) { + batch = factory.getSettings().getDefaultBatchFetchSize(); + } batchSize = batch; hasSubselectLoadableCollections = persistentClass.hasSubselectLoadableCollections(); @@ -631,16 +638,22 @@ subclassPropertyCascadeStyleClosure = new CascadeStyle[cascades.size()]; 
iter = cascades.iterator(); int j = 0; - while ( iter.hasNext() ) subclassPropertyCascadeStyleClosure[j++] = ( CascadeStyle ) iter.next(); + while ( iter.hasNext() ) { + subclassPropertyCascadeStyleClosure[j++] = ( CascadeStyle ) iter.next(); + } subclassPropertyFetchModeClosure = new FetchMode[joinedFetchesList.size()]; iter = joinedFetchesList.iterator(); j = 0; - while ( iter.hasNext() ) subclassPropertyFetchModeClosure[j++] = ( FetchMode ) iter.next(); + while ( iter.hasNext() ) { + subclassPropertyFetchModeClosure[j++] = ( FetchMode ) iter.next(); + } propertyDefinedOnSubclass = new boolean[definedBySubclass.size()]; iter = definedBySubclass.iterator(); j = 0; - while ( iter.hasNext() ) propertyDefinedOnSubclass[j++] = ( ( Boolean ) iter.next() ).booleanValue(); + while ( iter.hasNext() ) { + propertyDefinedOnSubclass[j++] = ( ( Boolean ) iter.next() ).booleanValue(); + } // Handle any filters applied to the class level filterHelper = new FilterHelper( persistentClass.getFilterMap(), factory.getDialect() ); @@ -651,7 +664,9 @@ protected String generateLazySelectString() { - if ( !entityMetamodel.hasLazyProperties() ) return null; + if ( !entityMetamodel.hasLazyProperties() ) { + return null; + } HashSet tableNumbers = new HashSet(); ArrayList columnNumbers = new ArrayList(); @@ -761,10 +776,14 @@ result = propValue; } } - if (rs!=null) rs.close(); + if ( rs != null ) { + rs.close(); + } } finally { - if (ps!=null) session.getBatcher().closeStatement(ps); + if ( ps != null ) { + session.getBatcher().closeStatement( ps ); + } } log.trace( "done initializing lazy properties" ); @@ -959,9 +978,13 @@ } } - if ( entityMetamodel.hasSubclasses() ) addDiscriminatorToSelect( select, name, suffix ); + if ( entityMetamodel.hasSubclasses() ) { + addDiscriminatorToSelect( select, name, suffix ); + } - if ( hasRowId() ) select.addColumn( name, rowIdName, ROWID_ALIAS ); + if ( hasRowId() ) { + select.addColumn( name, rowIdName, ROWID_ALIAS ); + } return 
select.toFragmentString(); } @@ -981,7 +1004,9 @@ ResultSet rs = ps.executeQuery(); try { //if there is no resulting row, return null - if ( !rs.next() ) return null; + if ( !rs.next() ) { + return null; + } //otherwise return the "hydrated" state (ie. associations are not resolved) Type[] types = getPropertyTypes(); @@ -1136,7 +1161,9 @@ if ( log.isTraceEnabled() ) { log.trace( "Locking entity: " + MessageHelper.infoString( this, id, getFactory() ) ); - if ( isVersioned() ) log.trace( "Version: " + version ); + if ( isVersioned() ) { + log.trace( "Version: " + version ); + } } final String sql = getLockString( lockMode ); @@ -1200,8 +1227,12 @@ ResultSet rs = st.executeQuery(); try { - if ( !rs.next() ) return null; - if ( !isVersioned() ) return this; + if ( !rs.next() ) { + return null; + } + if ( !isVersioned() ) { + return this; + } return getVersionType().nullSafeGet( rs, getVersionColumnName(), session, null ); } finally { @@ -1294,9 +1325,13 @@ } protected String generateTableAlias(String rootAlias, int tableNumber) { - if ( tableNumber == 0 ) return rootAlias; + if ( tableNumber == 0 ) { + return rootAlias; + } StringBuffer buf = new StringBuffer().append( rootAlias ); - if ( !rootAlias.endsWith( "_" ) ) buf.append( '_' ); + if ( !rootAlias.endsWith( "_" ) ) { + buf.append( '_' ); + } return buf.append( tableNumber ).append( '_' ).toString(); } @@ -1407,7 +1442,9 @@ public String[] getSubclassPropertyColumnAliases(String propertyName, String suffix) { String rawAliases[] = ( String[] ) subclassPropertyAliases.get( propertyName ); - if ( rawAliases == null ) return null; + if ( rawAliases == null ) { + return null; + } String result[] = new String[rawAliases.length]; for ( int i = 0; i < rawAliases.length; i++ ) { @@ -1611,7 +1648,9 @@ initOrdinaryPropertyPaths(mapping); initOrdinaryPropertyPaths(mapping); //do two passes, for collection property-ref! 
initIdentifierPropertyPaths(mapping); - if ( entityMetamodel.isPolymorphic() ) initDiscriminatorPropertyPath(mapping); + if ( entityMetamodel.isPolymorphic() ) { + initDiscriminatorPropertyPath( mapping ); + } } protected UniqueEntityLoader createEntityLoader(LockMode lockMode, Map enabledFilters) throws MappingException { @@ -1751,7 +1790,9 @@ } // add the discriminator - if ( j == 0 ) addDiscriminatorToInsert( insert ); + if ( j == 0 ) { + addDiscriminatorToInsert( insert ); + } // add the primary key if ( j == 0 && identityInsert ) { @@ -1782,7 +1823,9 @@ Delete delete = new Delete() .setTableName( getTableName( j ) ) .setPrimaryKeyColumnNames( getKeyColumns( j ) ); - if ( j == 0 ) delete.setVersionColumnName( getVersionColumnName() ); + if ( j == 0 ) { + delete.setVersionColumnName( getVersionColumnName() ); + } if ( getFactory().getSettings().isCommentsEnabled() ) { delete.setComment( "delete " + getEntityName() ); } @@ -1901,7 +1944,9 @@ } } - if ( sequentialResultSet != null ) sequentialResultSet.close(); + if ( sequentialResultSet != null ) { + sequentialResultSet.close(); + } return values; @@ -1937,7 +1982,9 @@ if ( log.isTraceEnabled() ) { log.trace( "Inserting entity: " + getEntityName() + " (native id)" ); - if ( isVersioned() ) log.trace( "Version: " + Versioning.getVersion( fields, this ) ); + if ( isVersioned() ) { + log.trace( "Version: " + Versioning.getVersion( fields, this ) ); + } } try { @@ -2022,15 +2069,21 @@ final SessionImplementor session) throws HibernateException { - if ( isInverseTable( j ) ) return; + if ( isInverseTable( j ) ) { + return; + } //note: it is conceptually possible that a UserType could map null to // a non-null value, so the following is arguable: - if ( isNullableTable( j ) && isAllNull( fields, j ) ) return; + if ( isNullableTable( j ) && isAllNull( fields, j ) ) { + return; + } if ( log.isTraceEnabled() ) { log.trace( "Inserting entity: " + MessageHelper.infoString( this, id, getFactory() ) ); - if ( j == 0 && 
isVersioned() ) log.trace( "Version: " + Versioning.getVersion( fields, this ) ); + if ( j == 0 && isVersioned() ) { + log.trace( "Version: " + Versioning.getVersion( fields, this ) ); + } } boolean callable = isInsertCallable( j ); @@ -2065,11 +2118,15 @@ } catch ( SQLException sqle ) { - if ( useBatch ) session.getBatcher().abortBatch( sqle ); + if ( useBatch ) { + session.getBatcher().abortBatch( sqle ); + } throw sqle; } finally { - if ( !useBatch ) session.getBatcher().closeStatement( insert ); + if ( !useBatch ) { + session.getBatcher().closeStatement( insert ); + } } } catch ( SQLException sqle ) { @@ -2145,7 +2202,9 @@ if ( log.isTraceEnabled() ) { log.trace( "Updating entity: " + MessageHelper.infoString( this, id, getFactory() ) ); - if ( useVersion ) log.trace( "Existing version: " + oldVersion + " -> New version: " + fields[getVersionProperty()] ); + if ( useVersion ) { + log.trace( "Existing version: " + oldVersion + " -> New version: " + fields[getVersionProperty()] ); + } } try { @@ -2208,11 +2267,15 @@ } catch ( SQLException sqle ) { - if ( useBatch ) session.getBatcher().abortBatch( sqle ); + if ( useBatch ) { + session.getBatcher().abortBatch( sqle ); + } throw sqle; } finally { - if ( !useBatch ) session.getBatcher().closeStatement( update ); + if ( !useBatch ) { + session.getBatcher().closeStatement( update ); + } } } @@ -2236,8 +2299,10 @@ final String sql, final SessionImplementor session) throws HibernateException { - - if ( isInverseTable( j ) ) return; + + if ( isInverseTable( j ) ) { + return; + } final boolean useVersion = j == 0 && isVersioned(); final boolean callable = isDeleteCallable( j ); @@ -2245,7 +2310,9 @@ if ( log.isTraceEnabled() ) { log.trace( "Deleting entity: " + MessageHelper.infoString( this, id, getFactory() ) ); - if ( useVersion ) log.trace( "Version: " + version ); + if ( useVersion ) { + log.trace( "Version: " + version ); + } } if ( isTableCascadeDeleteEnabled( j ) ) { @@ -2294,11 +2361,15 @@ } catch ( SQLException 
sqle ) { - if ( useBatch ) session.getBatcher().abortBatch( sqle ); + if ( useBatch ) { + session.getBatcher().abortBatch( sqle ); + } throw sqle; } finally { - if ( !useBatch ) session.getBatcher().closeStatement( delete ); + if ( !useBatch ) { + session.getBatcher().closeStatement( delete ); + } } } @@ -2443,18 +2514,30 @@ protected void logStaticSQL() { if ( log.isDebugEnabled() ) { log.debug( "Static SQL for entity: " + getEntityName() ); - if ( sqlLazySelectString != null ) log.debug( " Lazy select: " + sqlLazySelectString ); - if ( sqlVersionSelectString != null ) log.debug( " Version select: " + sqlVersionSelectString ); - if ( sqlSnapshotSelectString != null ) log.debug( " Snapshot select: " + sqlSnapshotSelectString ); + if ( sqlLazySelectString != null ) { + log.debug( " Lazy select: " + sqlLazySelectString ); + } + if ( sqlVersionSelectString != null ) { + log.debug( " Version select: " + sqlVersionSelectString ); + } + if ( sqlSnapshotSelectString != null ) { + log.debug( " Snapshot select: " + sqlSnapshotSelectString ); + } for ( int j = 0; j < getTableSpan(); j++ ) { log.debug( " Insert " + j + ": " + getSQLInsertStrings()[j] ); log.debug( " Update " + j + ": " + getSQLUpdateStrings()[j] ); log.debug( " Delete " + j + ": " + getSQLDeleteStrings()[j] ); } - if ( sqlIdentityInsertString != null ) log.debug( " Identity insert: " + sqlIdentityInsertString ); - if ( sqlUpdateByRowIdString != null ) log.debug( " Update by row id (all fields): " + sqlUpdateByRowIdString ); - if ( sqlLazyUpdateByRowIdString != null ) log.debug( " Update by row id (non-lazy fields): " + sqlLazyUpdateByRowIdString ); + if ( sqlIdentityInsertString != null ) { + log.debug( " Identity insert: " + sqlIdentityInsertString ); + } + if ( sqlUpdateByRowIdString != null ) { + log.debug( " Update by row id (all fields): " + sqlUpdateByRowIdString ); + } + if ( sqlLazyUpdateByRowIdString != null ) { + log.debug( " Update by row id (non-lazy fields): " + sqlLazyUpdateByRowIdString ); + } 
if ( sqlInsertGeneratedValuesSelectString != null ) { log.debug( "Insert-generated property select: " + sqlInsertGeneratedValuesSelectString ); } @@ -2707,7 +2790,9 @@ } protected void createQueryLoader() { - if ( loaderName != null ) queryLoader = new NamedQueryLoader( loaderName, this ); + if ( loaderName != null ) { + queryLoader = new NamedQueryLoader( loaderName, this ); + } } /** @@ -2748,7 +2833,9 @@ private boolean isAllNull(Object[] array, int tableNumber) { for ( int i = 0; i < array.length; i++ ) { - if ( isPropertyOfTable( i, tableNumber ) && array[i] != null ) return false; + if ( isPropertyOfTable( i, tableNumber ) && array[i] != null ) { + return false; + } } return true; } @@ -2766,7 +2853,9 @@ final boolean[] updateability = getPropertyUpdateability(); //no need to check laziness, dirty checking handles that for ( int j = 0; j < dirtyProperties.length; j++ ) { int property = dirtyProperties[j]; - if ( updateability[property] ) propsToUpdate[property] = true; + if ( updateability[property] ) { + propsToUpdate[property] = true; + } } if ( isVersioned() ) { propsToUpdate[ getVersionProperty() ] = @@ -2782,7 +2871,9 @@ protected boolean[] getPropertiesToInsert(Object[] fields) { boolean[] notNull = new boolean[fields.length]; boolean[] insertable = getPropertyInsertability(); - for ( int i = 0; i < fields.length; i++ ) notNull[i] = insertable[i] && fields[i] != null; + for ( int i = 0; i < fields.length; i++ ) { + notNull[i] = insertable[i] && fields[i] != null; + } return notNull; } @@ -2891,6 +2982,10 @@ return cacheEntryStructure; } + public Comparator getVersionComparator() { + return isVersioned() ? getVersionType().getComparator() : null; + } + // temporary ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ public final String getEntityName() { return entityMetamodel.getName(); @@ -2985,7 +3080,9 @@ } // we *always* assume an instance with a null // identifier or no identifier property is unsaved! 
- if ( id == null ) return Boolean.TRUE; + if ( id == null ) { + return Boolean.TRUE; + } // check the version unsaved-value, if appropriate final Object version = getVersion( entity, session.getEntityMode() ); @@ -2994,13 +3091,17 @@ // assigned identifiers Boolean result = entityMetamodel.getVersionProperty() .getUnsavedValue().isUnsaved( version ); - if ( result != null ) return result; + if ( result != null ) { + return result; + } } // check the id unsaved-value Boolean result = entityMetamodel.getIdentifierProperty() .getUnsavedValue().isUnsaved( id ); - if ( result != null ) return result; + if ( result != null ) { + return result; + } // check to see if it is in the second-level cache if ( hasCache() ) { Index: EntityPersister.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/persister/entity/EntityPersister.java,v retrieving revision 1.22 retrieving revision 1.23 diff -u -d -r1.22 -r1.23 --- EntityPersister.java 5 Dec 2005 18:45:52 -0000 1.22 +++ EntityPersister.java 9 Feb 2006 20:48:43 -0000 1.23 @@ -9,6 +9,7 @@ import org.hibernate.MappingException; import org.hibernate.EntityMode; import org.hibernate.cache.CacheConcurrencyStrategy; +import org.hibernate.cache.OptimisticCacheSource; import org.hibernate.cache.entry.CacheEntryStructure; import org.hibernate.engine.CascadeStyle; import org.hibernate.engine.SessionFactoryImplementor; @@ -27,7 +28,7 @@ * * @author Gavin King */ -public interface EntityPersister { +public interface EntityPersister extends OptimisticCacheSource { /** * The property name of the "special" identifier property in HQL |
From: <ste...@us...> - 2006-02-09 20:48:50
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/action In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7349/src/org/hibernate/action Modified Files: EntityInsertAction.java EntityUpdateAction.java Log Message: HHH-1457 : JBossCache 1.3.0 optimistic locking support Index: EntityInsertAction.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/action/EntityInsertAction.java,v retrieving revision 1.33 retrieving revision 1.34 diff -u -d -r1.33 -r1.34 --- EntityInsertAction.java 26 Sep 2005 19:29:02 -0000 1.33 +++ EntityInsertAction.java 9 Feb 2006 20:48:42 -0000 1.34 @@ -88,7 +88,8 @@ session.getEntityMode(), session.getFactory() ); - boolean put = persister.getCache().insert(ck, cacheEntry); +// boolean put = persister.getCache().insert(ck, cacheEntry); + boolean put = persister.getCache().insert( ck, cacheEntry, version ); if ( put && factory.getStatistics().isStatisticsEnabled() ) { factory.getStatisticsImplementor() Index: EntityUpdateAction.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/action/EntityUpdateAction.java,v retrieving revision 1.33 retrieving revision 1.34 diff -u -d -r1.33 -r1.34 --- EntityUpdateAction.java 26 Sep 2005 19:29:02 -0000 1.33 +++ EntityUpdateAction.java 9 Feb 2006 20:48:42 -0000 1.34 @@ -140,7 +140,8 @@ instance ); cacheEntry = persister.getCacheEntryStructure().structure(ce); - boolean put = persister.getCache().update(ck, cacheEntry); +// boolean put = persister.getCache().update(ck, cacheEntry); + boolean put = persister.getCache().update( ck, cacheEntry, nextVersion, previousVersion ); if ( put && factory.getStatistics().isStatisticsEnabled() ) { factory.getStatisticsImplementor() |
From: <ste...@us...> - 2006-02-09 20:48:50
|
Update of /cvsroot/hibernate/Hibernate3/src/org/hibernate/impl In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv7349/src/org/hibernate/impl Modified Files: SessionFactoryImpl.java Log Message: HHH-1457 : JBossCache 1.3.0 optimistic locking support Index: SessionFactoryImpl.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/src/org/hibernate/impl/SessionFactoryImpl.java,v retrieving revision 1.106 retrieving revision 1.107 diff -u -d -r1.106 -r1.107 --- SessionFactoryImpl.java 8 Feb 2006 18:48:21 -0000 1.106 +++ SessionFactoryImpl.java 9 Feb 2006 20:48:42 -0000 1.107 @@ -43,6 +43,7 @@ import org.hibernate.cache.CacheKey; import org.hibernate.cache.QueryCache; import org.hibernate.cache.UpdateTimestampsCache; +import org.hibernate.cache.OptimisticCache; import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Settings; import org.hibernate.cfg.Environment; @@ -215,6 +216,9 @@ } } EntityPersister cp = PersisterFactory.createClassPersister(model, cache, this, mapping); + if ( cache != null && cache.getCache() instanceof OptimisticCache ) { + ( ( OptimisticCache ) cache.getCache() ).setSource( cp ); + } entityPersisters.put( model.getEntityName(), cp ); classMeta.put( model.getEntityName(), cp.getClassMetadata() ); } |
From: <max...@us...> - 2006-02-09 18:49:53
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/sql In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv19516/test/org/hibernate/test/sql Modified Files: Tag: Branch_3_1 General.hbm.xml Log Message: make it run on dbs that are case specific Index: General.hbm.xml =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/sql/General.hbm.xml,v retrieving revision 1.1.2.1 retrieving revision 1.1.2.2 diff -u -d -r1.1.2.1 -r1.1.2.2 --- General.hbm.xml 9 Feb 2006 17:30:48 -0000 1.1.2.1 +++ General.hbm.xml 9 Feb 2006 18:49:45 -0000 1.1.2.2 @@ -13,34 +13,34 @@ <hibernate-mapping package="org.hibernate.test.sql" default-access="field"> - <class name="Organization"> - <id name="id" unsaved-value="0" column="orgid"> + <class name="Organization" table="ORGANIZATION"> + <id name="id" unsaved-value="0" column="ORGID"> <generator class="increment"/> </id> - <property name="name" not-null="true"/> + <property name="name" column="NAME" not-null="true"/> <set lazy="true" name="employments" inverse="true"> - <key column="employer"/> <!-- only needed for DDL generation --> + <key column="EMPLOYER"/> <!-- only needed for DDL generation --> <one-to-many class="Employment"/> </set> </class> - <class name="Person"> - <id name="id" unsaved-value="0" column="perid"> + <class name="Person" table="PERSON"> + <id name="id" unsaved-value="0" column="PERID"> <generator class="increment"/> </id> - <property name="name" not-null="true"/> + <property name="name" column="NAME" not-null="true"/> </class> - <class name="Employment"> - <id name="employmentId" unsaved-value="0" column="empid"> + <class name="Employment" table="EMPLOYMENT"> + <id name="employmentId" unsaved-value="0" column="EMPID"> <generator class="increment"/> </id> - <many-to-one name="employee" not-null="true" update="false"/> - <many-to-one name="employer" not-null="true" update="false"/> - <property name="startDate" not-null="false"/> - 
<property name="endDate" insert="false"/> - <property name="regionCode" update="false"/> + <many-to-one name="employee" column="EMPLOYEE" not-null="true" update="false"/> + <many-to-one name="employer" column="EMPLOYER" not-null="true" update="false"/> + <property name="startDate" column="STARTDATE" not-null="false"/> + <property name="endDate" column="ENDDATE" insert="false"/> + <property name="regionCode" column="REGIONCODE" update="false"/> <property name="salary" type="org.hibernate.test.sql.MonetaryAmountUserType"> <column name="VALUE" sql-type="float"/> <column name="CURRENCY"/> @@ -102,12 +102,12 @@ <sql-query name="EmploymentAndPerson"> <return class="Employment"/> <return class="Person"/> - SELECT * FROM Employment, Person + SELECT * FROM EMPLOYMENT, PERSON </sql-query> <sql-query name="organizationEmploymentsExplicitAliases"> <load-collection alias="empcol" role="Organization.employments"/> - SELECT empcol.employer as {empcol.key}, empcol.empid as {empcol.element}, {empcol.element.*} + SELECT empcol.EMPLOYER as {empcol.key}, empcol.EMPID as {empcol.element}, {empcol.element.*} FROM EMPLOYMENT empcol WHERE EMPLOYER = :id ORDER BY STARTDATE ASC, EMPLOYEE ASC @@ -115,12 +115,12 @@ <sql-query name="organizationreturnproperty"> <return alias="org" class="Organization"> - <return-property name="id" column="orgid"/> - <return-property name="name" column="name"/> + <return-property name="id" column="ORGID"/> + <return-property name="name" column="NAME"/> </return> <return-join alias="emp" property="org.employments"> - <return-property name="key" column="employer"/> - <return-property name="element" column="empid"/> + <return-property name="key" column="EMPLOYER"/> + <return-property name="element" column="EMPID"/> <return-property name="element.employee" column="EMPLOYEE"/> <return-property name="element.employer" column="EMPLOYER"/> <return-property name="element.startDate" column="XSTARTDATE"/> @@ -132,8 +132,7 @@ <return-column name="CURRENCY"/> 
</return-property> </return-join> - SELECT org.orgid as orgid, org.name as name, emp.employer as employer, emp.empid as empid, emp.employee as employee, emp.employer as employer, emp.startDate as xstartDate, emp.endDate as endDate, emp.regionCode as regionCode, emp.VALUE as VALUE, emp.CURRENCY as CURRENCY - FROM ORGANIZATION org + SELECT org.ORGID as orgid, org.NAME as name, emp.EMPLOYER as employer, emp.EMPID as empid, emp.EMPLOYEE as employee, emp.EMPLOYER as employer, emp.STARTDATE as xstartDate, emp.ENDDATE as endDate, emp.REGIONCODE as regionCode, emp.VALUE as VALUE, emp.CURRENCY as CURRENCY FROM ORGANIZATION org LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER </sql-query> @@ -142,7 +141,7 @@ <!-- equal to "organizationpropertyreturn" but since no {} nor return-property are used hibernate will fallback to use the columns directly from the mapping --> <return alias="org" class="Organization"/> <return-join alias="emp" property="org.employments"/> - SELECT org.orgid as orgid, org.name as name, emp.employer as employer, emp.empid as empid, emp.employee as employee, emp.employer as employer, emp.startDate as startDate, emp.endDate as endDate, emp.regionCode as regionCode, emp.VALUE as VALUE, emp.CURRENCY as CURRENCY + SELECT org.ORGID as orgid, org.NAME as name, emp.EMPLOYER as employer, emp.EMPID as empid, emp.EMPLOYEE as employee, emp.EMPLOYER as employer, emp.STARTDATE as startDate, emp.ENDDATE as endDate, emp.REGIONCODE as regionCode, emp.VALUE as VALUE, emp.CURRENCY as CURRENCY FROM ORGANIZATION org LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER </sql-query> |
From: <max...@us...> - 2006-02-09 18:49:22
|
Update of /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/sql In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv19350/test/org/hibernate/test/sql Modified Files: Tag: Branch_3_1 GeneralTest.java Log Message: make it run on dbs that are case specific Index: GeneralTest.java =================================================================== RCS file: /cvsroot/hibernate/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java,v retrieving revision 1.1.2.1 retrieving revision 1.1.2.2 diff -u -d -r1.1.2.1 -r1.1.2.2 --- GeneralTest.java 9 Feb 2006 17:30:48 -0000 1.1.2.1 +++ GeneralTest.java 9 Feb 2006 18:49:14 -0000 1.1.2.2 @@ -23,18 +23,18 @@ } protected String getOrganizationFetchJoinEmploymentSQL() { - return "SELECT org.orgid as {org.id}, " + - " org.name as {org.name}, " + - " emp.employer as {emp.key}, " + - " emp.empid as {emp.element}, " + + return "SELECT org.ORGID as {org.id}, " + + " org.NAME as {org.name}, " + + " emp.EMPLOYER as {emp.key}, " + + " emp.EMPID as {emp.element}, " + " {emp.element.*} " + "FROM ORGANIZATION org " + " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER"; } protected String getOrganizationJoinEmploymentSQL() { - return "SELECT org.orgid as {org.id}, " + - " org.name as {org.name}, " + + return "SELECT org.ORGID as {org.id}, " + + " org.NAME as {org.name}, " + " {emp.*} " + "FROM ORGANIZATION org " + " LEFT OUTER JOIN EMPLOYMENT emp ON org.ORGID = emp.EMPLOYER"; |