| Year | Jan | Feb | Mar | Apr | May | Jun | Jul | Aug | Sep | Oct | Nov | Dec |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 2002 | | | | | | | | | | 22 | 308 | 131 |
| 2003 | 369 | 171 | 236 | 187 | 218 | 217 | 127 | 448 | 270 | 231 | 422 | 255 |
| 2004 | 111 | 73 | 338 | 351 | 349 | 495 | 394 | 1048 | 499 | 142 | 269 | 638 |
| 2005 | 825 | 1272 | 593 | 690 | 950 | 958 | 767 | 839 | 525 | 449 | 585 | 455 |
| 2006 | 603 | 656 | 195 | 114 | 136 | 100 | 128 | 68 | 7 | 1 | 1 | 8 |
| 2007 | 4 | 3 | 8 | 16 | 5 | 4 | 6 | 23 | 15 | 5 | 7 | 5 |
| 2008 | 5 | 1 | 1 | 5 | 1 | 1 | | | | | 1 | |
| 2009 | | | | | | | 1 | | | | 1 | |
| 2011 | | | | | | | | | | | | 1 |
| 2012 | | | | 1 | | 1 | 1 | 1 | | 2 | 3 | 2 |
| 2013 | 1 | | 2 | 1 | | | 1 | | | | | |
| 2014 | | | 1 | | 2 | 1 | | 1 | 1 | | 1 | |
| 2015 | | | | 1 | | | | | 1 | | | |
| 2016 | | | 1 | | 1 | | | | | | | |
| 2017 | 1 | | | | | | 1 | | | | | |
From: <hib...@li...> - 2006-07-05 15:05:04
|
Author: ste...@jb... Date: 2006-07-05 11:03:52 -0400 (Wed, 05 Jul 2006) New Revision: 10084 Added: trunk/Hibernate3/src/org/hibernate/usertype/LoggableUserType.java Modified: trunk/Hibernate3/src/org/hibernate/type/ArrayType.java trunk/Hibernate3/src/org/hibernate/type/CollectionType.java trunk/Hibernate3/src/org/hibernate/type/CustomCollectionType.java trunk/Hibernate3/src/org/hibernate/type/CustomType.java Log: HHH-1586 and HHH-1881 : custom collection type logging Modified: trunk/Hibernate3/src/org/hibernate/type/ArrayType.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/type/ArrayType.java 2006-07-04 17:58:50 UTC (rev 10083) +++ trunk/Hibernate3/src/org/hibernate/type/ArrayType.java 2006-07-05 15:03:52 UTC (rev 10084) @@ -57,7 +57,9 @@ } public String toLoggableString(Object value, SessionFactoryImplementor factory) throws HibernateException { - if (value==null) return "null"; + if ( value == null ) { + return "null"; + } int length = Array.getLength(value); List list = new ArrayList(length); Type elemType = getElementType(factory); Modified: trunk/Hibernate3/src/org/hibernate/type/CollectionType.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/type/CollectionType.java 2006-07-04 17:58:50 UTC (rev 10083) +++ trunk/Hibernate3/src/org/hibernate/type/CollectionType.java 2006-07-05 15:03:52 UTC (rev 10084) @@ -134,28 +134,33 @@ public String toLoggableString(Object value, SessionFactoryImplementor factory) throws HibernateException { + if ( value == null ) { + return "null"; + } + else if ( !Hibernate.isInitialized( value ) ) { + return "<uninitialized>"; + } + else { + return renderLoggableString( value, factory ); + } + } - if ( value == null ) return "null"; - - if ( Hibernate.isInitialized( value ) ) { - if ( getReturnedClass().isInstance(value) ) { - List list = new ArrayList(); - Type elemType = getElementType( factory ); - Iterator iter = getElementsIterator( value ); - while ( iter.hasNext() ) { - list.add( elemType.toLoggableString( iter.next(), factory ) ); - } - return list.toString(); - } - else { - // for DOM4J "collections" only - return ( (Element) value ).asXML(); //TODO: it would be better if this was done at the higher level by Printer - } + protected String renderLoggableString(Object value, SessionFactoryImplementor factory) + throws HibernateException { + if ( Element.class.isInstance( value ) ) { + // for DOM4J "collections" only + // TODO: it would be better if this was done at the higher level by Printer + return ( ( Element ) value ).asXML(); } else { - return "<uninitialized>"; + List list = new ArrayList(); + Type elemType = getElementType( factory ); + Iterator iter = getElementsIterator( value ); + while ( iter.hasNext() ) { + list.add( elemType.toLoggableString( iter.next(), factory ) ); + } + return list.toString(); } - } public Object deepCopy(Object value, EntityMode entityMode, SessionFactoryImplementor factory) Modified: trunk/Hibernate3/src/org/hibernate/type/CustomCollectionType.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/type/CustomCollectionType.java 2006-07-04 17:58:50 UTC (rev 10083) +++ trunk/Hibernate3/src/org/hibernate/type/CustomCollectionType.java 2006-07-05 15:03:52 UTC (rev 10084) @@ -7,10 +7,13 @@ import org.hibernate.HibernateException; import org.hibernate.MappingException; +import org.hibernate.Hibernate; import 
org.hibernate.collection.PersistentCollection; import org.hibernate.engine.SessionImplementor; +import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.usertype.UserCollectionType; +import org.hibernate.usertype.LoggableUserType; /** * A custom type for mapping user-written classes that implement <tt>PersistentCollection</tt> @@ -22,27 +25,29 @@ public class CustomCollectionType extends CollectionType { private final UserCollectionType userType; + private final boolean customLogging; public CustomCollectionType(Class userTypeClass, String role, String foreignKeyPropertyName, boolean isEmbeddedInXML) { super(role, foreignKeyPropertyName, isEmbeddedInXML); - - if ( !UserCollectionType.class.isAssignableFrom(userTypeClass) ) { + + if ( !UserCollectionType.class.isAssignableFrom( userTypeClass ) ) { throw new MappingException( "Custom type does not implement UserCollectionType: " + userTypeClass.getName() ); } - + try { - userType = (UserCollectionType) userTypeClass.newInstance(); + userType = ( UserCollectionType ) userTypeClass.newInstance(); } - catch (InstantiationException ie) { + catch ( InstantiationException ie ) { throw new MappingException( "Cannot instantiate custom type: " + userTypeClass.getName() ); } - catch (IllegalAccessException iae) { + catch ( IllegalAccessException iae ) { throw new MappingException( "IllegalAccessException trying to instantiate custom type: " + userTypeClass.getName() ); } + customLogging = LoggableUserType.class.isAssignableFrom( userTypeClass ); } - - public PersistentCollection instantiate(SessionImplementor session, CollectionPersister persister, Serializable key) + + public PersistentCollection instantiate(SessionImplementor session, CollectionPersister persister, Serializable key) throws HibernateException { return userType.instantiate(session, persister); } @@ -68,10 +73,19 @@ public Object indexOf(Object collection, Object entity) { return userType.indexOf(collection, entity); } - + public Object replaceElements(Object original, Object target, Object owner, Map copyCache, SessionImplementor session) throws HibernateException { CollectionPersister cp = session.getFactory().getCollectionPersister( getRole() ); return userType.replaceElements(original, target, cp, owner, copyCache, session); } + + protected String renderLoggableString(Object value, SessionFactoryImplementor factory) throws HibernateException { + if ( customLogging ) { + return ( ( LoggableUserType ) userType ).toLoggableString( value, factory ); + } + else { + return super.renderLoggableString( value, factory ); + } + } } Modified: trunk/Hibernate3/src/org/hibernate/type/CustomType.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/type/CustomType.java 2006-07-04 17:58:50 UTC (rev 10083) +++ trunk/Hibernate3/src/org/hibernate/type/CustomType.java 2006-07-05 15:03:52 UTC (rev 10084) @@ -21,9 +21,11 @@ import org.hibernate.usertype.EnhancedUserType; import org.hibernate.usertype.UserType; import org.hibernate.usertype.UserVersionType; +import org.hibernate.usertype.LoggableUserType; /** - * Adapts <tt>UserType</tt> to the generic <tt>Type</tt> interface. + * Adapts {@link UserType} to the generic {@link Type} interface, in order + * to isolate user code from changes in the internal Type contracts. 
* * @see org.hibernate.usertype.UserType * @author Gavin King @@ -33,40 +35,39 @@ private final UserType userType; private final String name; private final int[] types; + private final boolean customLogging; public CustomType(Class userTypeClass, Properties parameters) throws MappingException { + if ( !UserType.class.isAssignableFrom( userTypeClass ) ) { + throw new MappingException( + "Custom type does not implement UserType: " + + userTypeClass.getName() + ); + } + name = userTypeClass.getName(); - if ( !UserType.class.isAssignableFrom(userTypeClass) ) { - throw new MappingException( - "Custom type does not implement UserType: " + - userTypeClass.getName() - ); - } - try { - userType = (UserType) userTypeClass.newInstance(); + userType = ( UserType ) userTypeClass.newInstance(); } - catch (InstantiationException ie) { - throw new MappingException( - "Cannot instantiate custom type: " + - userTypeClass.getName() + catch ( InstantiationException ie ) { + throw new MappingException( + "Cannot instantiate custom type: " + + userTypeClass.getName() ); } - catch (IllegalAccessException iae) { - throw new MappingException( - "IllegalAccessException trying to instantiate custom type: " + - userTypeClass.getName() + catch ( IllegalAccessException iae ) { + throw new MappingException( + "IllegalAccessException trying to instantiate custom type: " + + userTypeClass.getName() ); } - /*if ( !Serializable.class.isAssignableFrom( userType.returnedClass() ) ) { - LogFactory.getLog(CustomType.class).warn("custom type does not implement Serializable: " + userTypeClass); - }*/ - TypeFactory.injectParameters(userType, parameters); + TypeFactory.injectParameters( userType, parameters ); types = userType.sqlTypes(); + customLogging = LoggableUserType.class.isAssignableFrom( userTypeClass ); } public int[] sqlTypes(Mapping pi) { @@ -85,7 +86,7 @@ return userType.equals(x, y); } - public boolean isEqual(Object x, Object y, EntityMode entityMode) + public boolean isEqual(Object x, Object y, EntityMode entityMode) throws HibernateException { return isEqual(x, y); } @@ -93,7 +94,7 @@ public int getHashCode(Object x, EntityMode entityMode) { return userType.hashCode(x); } - + public Object nullSafeGet( ResultSet rs, String[] names, @@ -114,7 +115,7 @@ } - public Object assemble(Serializable cached, SessionImplementor session, Object owner) + public Object assemble(Serializable cached, SessionImplementor session, Object owner) throws HibernateException { return userType.assemble(cached, owner); } @@ -125,20 +126,20 @@ } public Object replace( - Object original, + Object original, Object target, - SessionImplementor session, - Object owner, + SessionImplementor session, + Object owner, Map copyCache) throws HibernateException { return userType.replace(original, target, owner); } - + public void nullSafeSet( PreparedStatement st, Object value, int index, - boolean[] settable, + boolean[] settable, SessionImplementor session ) throws HibernateException, SQLException { @@ -173,7 +174,7 @@ return name; } - public Object deepCopy(Object value, EntityMode entityMode, SessionFactoryImplementor factory) + public Object deepCopy(Object value, EntityMode entityMode, SessionFactoryImplementor factory) throws HibernateException { return userType.deepCopy(value); } @@ -206,14 +207,22 @@ return fromXMLString( xml.getText(), factory ); } - public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory) + public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory) throws 
HibernateException { node.setText( toXMLString(value, factory) ); } - public String toLoggableString(Object value, SessionFactoryImplementor factory) + public String toLoggableString(Object value, SessionFactoryImplementor factory) throws HibernateException { - return value==null ? "null" : toXMLString(value, factory); + if ( value == null ) { + return "null"; + } + else if ( customLogging ) { + return ( ( LoggableUserType ) userType ).toLoggableString( value, factory ); + } + else { + return toXMLString( value, factory ); + } } public boolean[] toColumnNullness(Object value, Mapping mapping) { @@ -221,9 +230,9 @@ if (value!=null) Arrays.fill(result, true); return result; } - + public boolean isDirty(Object old, Object current, boolean[] checkable, SessionImplementor session) throws HibernateException { return checkable[0] && isDirty(old, current, session); } - + } Added: trunk/Hibernate3/src/org/hibernate/usertype/LoggableUserType.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/usertype/LoggableUserType.java 2006-07-04 17:58:50 UTC (rev 10083) +++ trunk/Hibernate3/src/org/hibernate/usertype/LoggableUserType.java 2006-07-05 15:03:52 UTC (rev 10084) @@ -0,0 +1,20 @@ +package org.hibernate.usertype; + +import org.hibernate.engine.SessionFactoryImplementor; + +/** + * Marker interface for user types which want to perform custom + * logging of their corresponding values + * + * @author Steve Ebersole + */ +public interface LoggableUserType { + /** + * Generate a loggable string representation of the collection (value). + * + * @param value The collection to be logged; guarenteed to be non-null and initialized. + * @param factory The factory. + * @return The loggable string representation. + */ + public String toLoggableString(Object value, SessionFactoryImplementor factory); +} |
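A minimal sketch of how a custom type might opt in to the new logging hook: the `MaskedStringType` class below, its package, and the masking behaviour are hypothetical and not part of the commit; it assumes the Hibernate 3.x `UserType` contract plus the `LoggableUserType` interface added above. Because the class implements `LoggableUserType`, `CustomType.toLoggableString()` delegates to it instead of rendering the value via `toXMLString()`.

```java
package org.example.types; // hypothetical package

import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

import org.hibernate.HibernateException;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.usertype.LoggableUserType;
import org.hibernate.usertype.UserType;

/**
 * Hypothetical immutable string-valued custom type that masks its value
 * in log output by implementing the new LoggableUserType marker interface.
 */
public class MaskedStringType implements UserType, LoggableUserType {

	private static final int[] SQL_TYPES = { Types.VARCHAR };

	public int[] sqlTypes() { return SQL_TYPES; }

	public Class returnedClass() { return String.class; }

	public boolean isMutable() { return false; }

	public boolean equals(Object x, Object y) throws HibernateException {
		return x == null ? y == null : x.equals( y );
	}

	public int hashCode(Object x) throws HibernateException {
		return x.hashCode();
	}

	public Object nullSafeGet(ResultSet rs, String[] names, Object owner)
			throws HibernateException, SQLException {
		return rs.getString( names[0] );
	}

	public void nullSafeSet(PreparedStatement st, Object value, int index)
			throws HibernateException, SQLException {
		if ( value == null ) {
			st.setNull( index, Types.VARCHAR );
		}
		else {
			st.setString( index, ( String ) value );
		}
	}

	public Object deepCopy(Object value) throws HibernateException {
		return value; // immutable, so the instance itself is a safe copy
	}

	public Serializable disassemble(Object value) throws HibernateException {
		return ( Serializable ) value;
	}

	public Object assemble(Serializable cached, Object owner) throws HibernateException {
		return cached;
	}

	public Object replace(Object original, Object target, Object owner) throws HibernateException {
		return original;
	}

	// The new hook: the value is guaranteed to be non-null when this is called.
	public String toLoggableString(Object value, SessionFactoryImplementor factory) {
		return "****(" + ( ( String ) value ).length() + " chars)";
	}
}
```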
From: Lori T. <pok...@po...> - 2006-07-04 20:25:43
|
This is a Brand New Compnay Never Marketed Before. We have seen these types move 4.00 in the first day of marketing. We Expect the 6th to be unbelieveable. THIS IS A SOLID COMPANY. PETROSUN DRILLING (PSUD) Current Price: 1.30 Up .05 Friday without any marketing! Current News PetroSun Announces Formation of Algae BioFuels PetroSun Drilling Inc. (PSUD - News), an emerging provider of oilfield services to major and independent producers of oil and natural gas, announced today that the company has formed Algae BioFuels Inc. as a wholly owned subsidiary. Algae BioFuels will be engaged in the research and development of algae cultivation as an energy source in the production of biodiesel, an economically feasible and eco-friendly alternative to petroleum-based transportation fuels. The R&D and production facilities for Algae BioFuels will be based in Arizona and Australia. "PetroSun's formation of Algae BioFuels is a forward-looking strategy," said L. Rayfield Wright, president of PetroSun. "The 0pp0rtunity to produce a renewable energy product that will assist in providing a healthier planet for future generations cannot be ignored." Biofuel is any fuel that is derived from biomass -- which contains recently living organisms or their metabolic byproducts. Biofuel is a renewable energy source, unlike other natural resources such as petroleum, coal and nuclear fuels. Agricultural products specifically grown for use as biofuels include corn and soybeans. Extensive research is currently being conducted to determine the utilization of microalgae as an energy source, with applications being developed for biodiesel, ethanol, methanol, methane and even hydrogen. Independent studies have demonstrated that algae is capable of producing 30 times more oil per acre than the current crops now utilized for the production of biofuels. Algae biofuel contains no sulfur, is non-toxic and highly biodegradable. The Office of Fuels Development, a division of the Department of Energy, funded a program from 1978 through 1996 under the National Renewable Energy Laboratory known as the "Aquatic Species Program." The focus of this program was to investigate high-oil algae that could be grown specifically for the purpose of wide-scale biodiesel production. Some species of algae are ideally suited to biodiesel production due to their high oil content, in excess of 50%, and extremely rapid growth rates. One of the biggest advantages of biodiesel, compared to many other alternative transportation fuels, is that it can be used in existing diesel engines, which relieves automotive manufacturers of having to make costly engine modifications. Biodiesel can also be mixed, at any ratio, with conventional petroleum diesel. As a result, the alternative fuel can be used in the current distribution infrastructure, replacing petroleum diesel either wholly, or as a diesel fuel blend with minimal integration costs. About PetroSun PetroSun's current operations are concentrated in the Ark-La-Tex region with plans to expand into New Mexico, Arizona, Utah and Australia in 2006. PetroSun provides a comprehensive array of products and services to the oil industry. The company's cutting-edge technologies, combined with a proven ability to apply them effectively and safely within a disciplined ROI framework, creates long-term value for PetroSun shareholders and partners. PetroSun is headquartered in Phoenix. |
From: <hib...@li...> - 2006-07-04 17:58:55
|
Author: epbernard Date: 2006-07-04 13:58:50 -0400 (Tue, 04 Jul 2006) New Revision: 10083 Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java Log: init ram directory index (Sylvain Vieujot) Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-04 01:28:33 UTC (rev 10082) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-04 17:58:50 UTC (rev 10083) @@ -9,9 +9,10 @@ /** * Set up and provide a Lucene <code>Directory</code> - * <code>equals()</code> and <code>hashCode()</code> must guaranty to - * return true for a provider pointing to the same underlying Lucene Store + * <code>equals()</code> and <code>hashCode()</code> must guaranty equality + * between two providers pointing to the same underlying Lucene Store * This class must be thread safe regarding <code>getDirectory()</code> + * calls * * @author Emmanuel Bernard * @author Sylvain Vieujot Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java 2006-07-04 01:28:33 UTC (rev 10082) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java 2006-07-04 17:58:50 UTC (rev 10083) @@ -1,9 +1,13 @@ //$Id: $ package org.hibernate.lucene.store; +import java.io.IOException; import java.util.Properties; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.RAMDirectory; +import org.hibernate.HibernateException; import org.hibernate.cfg.Configuration; /** @@ -20,6 +24,13 @@ public void initialize(Class entity, Configuration cfg, Properties properties) { indexName = DirectoryProviderFactory.getTypeName( entity ); directory = new RAMDirectory(); + try { + IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), true ); + iw.close(); + } + catch (IOException e) { + throw new HibernateException( "Unable to initialize index: " + indexName, e ); + } } public RAMDirectory getDirectory() { |
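The point of the fix, briefly: a freshly constructed `RAMDirectory` holds no segments file, so code that later opens it for reading or appending would fail; opening an `IndexWriter` with `create=true` and closing it immediately writes the initial empty index. A small sketch of the same priming pattern outside the provider — the Lucene calls mirror the diff, while the surrounding helper class is hypothetical:

```java
import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

public class RamIndexBootstrap { // hypothetical helper

	public static RAMDirectory createEmptyIndex() throws IOException {
		RAMDirectory directory = new RAMDirectory();
		// create=true writes the initial (empty) segments file; without it,
		// a later IndexReader or appending IndexWriter on the directory fails.
		IndexWriter iw = new IndexWriter( directory, new StandardAnalyzer(), true );
		iw.close();
		return directory;
	}
}
```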
From: <hib...@li...> - 2006-07-04 01:28:39
|
Author: honma Date: 2006-07-03 21:28:33 -0400 (Mon, 03 Jul 2006) New Revision: 10082 Added: trunk/Hibernate3/doc/reference/ja/readme_ja.txt Log: test Added: trunk/Hibernate3/doc/reference/ja/readme_ja.txt =================================================================== --- trunk/Hibernate3/doc/reference/ja/readme_ja.txt 2006-07-04 01:17:19 UTC (rev 10081) +++ trunk/Hibernate3/doc/reference/ja/readme_ja.txt 2006-07-04 01:28:33 UTC (rev 10082) @@ -0,0 +1 @@ +test commit. \ No newline at end of file |
From: <hib...@li...> - 2006-07-04 01:17:23
|
Author: honma Date: 2006-07-03 21:17:19 -0400 (Mon, 03 Jul 2006) New Revision: 10081 Added: trunk/Hibernate3/doc/reference/ja/ Log: first commit |
From: <hib...@li...> - 2006-07-03 14:14:04
|
Author: ste...@jb... Date: 2006-07-03 10:13:46 -0400 (Mon, 03 Jul 2006) New Revision: 10080 Modified: trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java Log: JDK5 auto-boxing correction Modified: trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java 2006-07-03 13:58:53 UTC (rev 10079) +++ trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java 2006-07-03 14:13:46 UTC (rev 10080) @@ -546,7 +546,7 @@ Session s = openSession(); Transaction t = s.beginTransaction(); Speech speech = new Speech(); - speech.setLength( 23d ); + speech.setLength( new Double( 23d ) ); speech.setName( "Mine" ); s.persist( speech ); s.flush(); |
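For context: `Speech.setLength(Double)` takes a wrapper type (see the `Speech` class added in r10074 below), so passing the primitive `23d` compiles only under JDK 5 auto-boxing; the explicit wrapper keeps the test source buildable on earlier compilers. A minimal illustration, assuming the test context:

```java
Speech speech = new Speech();
// speech.setLength( 23d );              // compiles only with JDK 5 auto-boxing
speech.setLength( new Double( 23d ) );   // explicit boxing works on older JDKs too
```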
From: <hib...@li...> - 2006-07-02 01:41:48
|
Author: epbernard Date: 2006-07-01 21:41:42 -0400 (Sat, 01 Jul 2006) New Revision: 10078 Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java Log: ANN-386 add support for RAMProvider fix equals and hashCode methods for *DirectoryProvider minor generics enhancements Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-01 17:18:12 UTC (rev 10077) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-02 01:41:42 UTC (rev 10078) @@ -30,14 +30,14 @@ * @author Sylvain Vieujot */ public class DirectoryProviderFactory { - public List<DirectoryProvider> providers = new ArrayList<DirectoryProvider>(); + public List<DirectoryProvider<?>> providers = new ArrayList<DirectoryProvider<?>>(); private static String LUCENE_PREFIX = "hibernate.lucene."; private static String LUCENE_DEFAULT = LUCENE_PREFIX + "default."; private static String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName(); - public DirectoryProvider createDirectoryProvider(Class<?> entity, Configuration cfg) { + public DirectoryProvider<?> createDirectoryProvider(Class<?> entity, Configuration cfg) { //get properties Properties indexProps = getDirectoryProperties( cfg, entity ); @@ -46,7 +46,7 @@ if ( StringHelper.isEmpty( className ) ) { className = DEFAULT_DIRECTORY_PROVIDER; } - DirectoryProvider provider = null; + DirectoryProvider<?> provider = null; try { Class<DirectoryProvider> directoryClass = ReflectHelper.classForName( className, DirectoryProviderFactory.class @@ -54,7 +54,7 @@ provider = directoryClass.newInstance(); } catch (Exception e) { - throw new HibernateException( "Unable to instanciate directory provider: " + className ); + throw new HibernateException( "Unable to instanciate directory provider: " + className, e ); } provider.initialize( entity, cfg, indexProps ); int index = providers.indexOf( provider ); Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-01 17:18:12 UTC (rev 10077) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-02 01:41:42 UTC (rev 10078) @@ -8,10 +8,9 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.lucene.store.Directory; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.hibernate.HibernateException; import org.hibernate.cfg.Configuration; @@ -23,11 +22,12 @@ * @author Emmanuel Bernard * @author Sylvain Vieujot */ -public class FSDirectoryProvider implements DirectoryProvider { +public class FSDirectoryProvider implements DirectoryProvider<FSDirectory> { private FSDirectory directory; private static Log log = LogFactory.getLog( 
FSDirectoryProvider.class ); + private String indexName; - public void initialize(Class entity, Configuration cfg, Properties properties) { + public void initialize(Class entity, @SuppressWarnings("unused") Configuration cfg, Properties properties) { String indexBase = properties.getProperty( "indexBase", "." ); File indexDir = new File( indexBase ); @@ -45,7 +45,8 @@ try { boolean create = !file.exists(); - directory = FSDirectory.getDirectory( file.getCanonicalPath(), create ); + indexName = file.getCanonicalPath(); + directory = FSDirectory.getDirectory( indexName, create ); if (create) { IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create ); iw.close(); @@ -57,7 +58,25 @@ } - public Directory getDirectory() { + public FSDirectory getDirectory() { return directory; } + + @Override + public boolean equals(Object obj) { + // this code is actually broken since the value change after initialize call + // but from a practical POV this is fine since we only call this method + // after initialize call + if (obj == this) return true; + if (obj == null || ! (obj instanceof FSDirectoryProvider) ) return false; + return indexName.equals( ( (FSDirectoryProvider) obj).indexName ); + } + + @Override + public int hashCode() { + // this code is actually broken since the value change after initialize call + // but from a practical POV this is fine since we only call this method + // after initialize call + return indexName.hashCode(); + } } Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java 2006-07-01 17:18:12 UTC (rev 10077) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/RAMDirectoryProvider.java 2006-07-02 01:41:42 UTC (rev 10078) @@ -0,0 +1,47 @@ +//$Id: $ +package org.hibernate.lucene.store; + +import java.util.Properties; + +import org.apache.lucene.store.RAMDirectory; +import org.hibernate.cfg.Configuration; + +/** + * Use a Lucene RAMDirectory + * + * @author Emmanuel Bernard + * @author Sylvain Vieujot + */ +public class RAMDirectoryProvider implements DirectoryProvider<RAMDirectory> { + + private RAMDirectory directory; + private String indexName; + + public void initialize(Class entity, Configuration cfg, Properties properties) { + indexName = DirectoryProviderFactory.getTypeName( entity ); + directory = new RAMDirectory(); + } + + public RAMDirectory getDirectory() { + return directory; + } + + @Override + public boolean equals(Object obj) { + // this code is actually broken since the value change after initialize call + // but from a practical POV this is fine since we only call this method + // after initialize call + if (obj == this) return true; + if (obj == null || ! (obj instanceof RAMDirectoryProvider) ) return false; + return indexName.equals( ( (RAMDirectoryProvider) obj).indexName ); + } + + @Override + public int hashCode() { + // this code is actually broken since the value change after initialize call + // but from a practical POV this is fine since we only call this method + // after initialize call + return indexName.hashCode(); + } + +} |
From: <hib...@li...> - 2006-07-01 17:18:20
|
Author: epbernard Date: 2006-07-01 13:18:12 -0400 (Sat, 01 Jul 2006) New Revision: 10077 Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java Log: ANN-385 abstract lucene directory. Add sylvain as contributor, move ...impl to ...directory_provider Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-01 15:11:45 UTC (rev 10076) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-01 17:18:12 UTC (rev 10077) @@ -12,7 +12,9 @@ * <code>equals()</code> and <code>hashCode()</code> must guaranty to * return true for a provider pointing to the same underlying Lucene Store * This class must be thread safe regarding <code>getDirectory()</code> + * * @author Emmanuel Bernard + * @author Sylvain Vieujot */ public interface DirectoryProvider<TDirectory extends Directory> { /** get the information toi initialize the directory and build its hashCode */ Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-01 15:11:45 UTC (rev 10076) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-01 17:18:12 UTC (rev 10077) @@ -15,12 +15,19 @@ /** * Create a Lucene directory provider * <p/> - * Lucene directory providers are configured through... 
- * hibernate.lucene.default.impl=FSDirectory - * hibernate.lucene.toto.impl=FSDirectory - * hibernate.lucene.toto.*= + * Lucene directory providers are configured through properties + * - hibernate.lucene.default.* and + * - hibernate.lucene.<indexname>.* * + * <indexname> properties have precedence over default + * + * The implementation is described by + * hibernate.lucene.[default|indexname].directory_provider + * + * If none is defined the default value is FSDirectory + * * @author Emmanuel Bernard + * @author Sylvain Vieujot */ public class DirectoryProviderFactory { public List<DirectoryProvider> providers = new ArrayList<DirectoryProvider>(); @@ -35,7 +42,7 @@ Properties indexProps = getDirectoryProperties( cfg, entity ); //set up the directory - String className = indexProps.getProperty( "impl" ); + String className = indexProps.getProperty( "directory_provider" ); if ( StringHelper.isEmpty( className ) ) { className = DEFAULT_DIRECTORY_PROVIDER; } Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-01 15:11:45 UTC (rev 10076) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-01 17:18:12 UTC (rev 10077) @@ -19,7 +19,9 @@ * Use a Lucene FSDirectory * The base directory is represented by hibernate.lucene.<index>.indexBase * The index is created in <base directory>/<index name> + * * @author Emmanuel Bernard + * @author Sylvain Vieujot */ public class FSDirectoryProvider implements DirectoryProvider { private FSDirectory directory; |
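Putting the property scheme described above into practice, a configuration sketch — the property values, the `Book` index name, and the helper class are illustrative; the keys follow the `hibernate.lucene.[default|<indexname>].*` convention and the `directory_provider` / `indexBase` names introduced in this commit:

```java
import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.Configuration;

public class LuceneStoreConfigExample { // hypothetical bootstrap helper

	public static Configuration configure() {
		Configuration cfg = new AnnotationConfiguration();

		// default.* applies to every index unless overridden
		cfg.setProperty( "hibernate.lucene.default.directory_provider",
				"org.hibernate.lucene.store.FSDirectoryProvider" );
		cfg.setProperty( "hibernate.lucene.default.indexBase", "/var/lucene/indexes" );

		// <indexname>.* keys take precedence over default.*, e.g. keep the
		// hypothetical "Book" index purely in memory
		cfg.setProperty( "hibernate.lucene.Book.directory_provider",
				"org.hibernate.lucene.store.RAMDirectoryProvider" );

		return cfg;
	}
}
```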
Author: epbernard Date: 2006-07-01 11:11:45 -0400 (Sat, 01 Jul 2006) New Revision: 10076 Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java trunk/HibernateExt/metadata/src/test/org/hibernate/lucene/test/ Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java Log: ANN-385 abstract lucene directory. Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java 2006-07-01 12:50:34 UTC (rev 10075) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/DocumentBuilder.java 2006-07-01 15:11:45 UTC (rev 10076) @@ -4,10 +4,10 @@ import java.beans.Introspector; import java.io.Serializable; import java.lang.reflect.AccessibleObject; +import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; -import java.lang.reflect.AnnotatedElement; import java.util.ArrayList; import java.util.List; @@ -15,10 +15,10 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.Term; -import org.apache.lucene.store.Directory; import org.hibernate.AssertionFailure; import org.hibernate.HibernateException; import org.hibernate.cfg.annotations.Version; +import org.hibernate.lucene.store.DirectoryProvider; //TODO handle attribute (only getters are handled currently) public class DocumentBuilder<T> { @@ -35,15 +35,15 @@ private final List<String> textNames = new ArrayList<String>(); //private final Class<T> beanClass; - private final Directory directory; + private final DirectoryProvider directoryProvider; private String idKeywordName; private final Analyzer analyzer; private Float idBoost; - public DocumentBuilder(Class<?> clazz, Analyzer analyzer, Directory directory) { + public DocumentBuilder(Class<?> clazz, Analyzer analyzer, DirectoryProvider directory) { //this.beanClass = clazz; this.analyzer = analyzer; - this.directory = directory; + this.directoryProvider = directory; for ( Class currClass = clazz; currClass != null ; currClass = currClass.getSuperclass() ) { Method[] methods = currClass.getDeclaredMethods(); @@ -174,8 +174,8 @@ return Introspector.decapitalize( methodName.substring( startIndex ) ); } - public Directory getDirectory() { - return directory; + public DirectoryProvider getDirectoryProvider() { + return directoryProvider; } public Analyzer getAnalyzer() { Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java 2006-07-01 12:50:34 UTC (rev 10075) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/event/LuceneEventListener.java 2006-07-01 15:11:45 UTC (rev 10076) @@ -1,7 +1,6 @@ //$Id$ package org.hibernate.lucene.event; -import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.HashMap; @@ -18,8 +17,6 @@ import 
org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; import org.hibernate.HibernateException; import org.hibernate.cfg.Configuration; import org.hibernate.event.Initializable; @@ -32,6 +29,8 @@ import org.hibernate.lucene.DocumentBuilder; import org.hibernate.lucene.Environment; import org.hibernate.lucene.Indexed; +import org.hibernate.lucene.store.DirectoryProvider; +import org.hibernate.lucene.store.DirectoryProviderFactory; import org.hibernate.mapping.PersistentClass; /** @@ -48,7 +47,7 @@ private Map<Class, DocumentBuilder<Object>> documentBuilders = new HashMap<Class, DocumentBuilder<Object>>(); //** keep track of the index modifiers per file since 1 index modifier can be present at a time */ - private Map<Directory, Lock> indexLock = new HashMap<Directory, Lock>(); + private Map<DirectoryProvider, Lock> indexLock = new HashMap<DirectoryProvider, Lock>(); private boolean initialized; private static final Log log = LogFactory.getLog( LuceneEventListener.class ); @@ -81,63 +80,27 @@ throw new HibernateException( "Failed to instantiate lucene analyzer with type " + analyzerClassName ); } - // Initialize index parent dir - String indexDirName = cfg.getProperty( Environment.INDEX_BASE_DIR ); - File indexDir = indexDirName != null ? new File( indexDirName ) : new File( "." ); - - if ( !( indexDir.exists() && indexDir.isDirectory() ) ) { - //TODO create the directory - throw new HibernateException( "Index directory does not exists: " + Environment.INDEX_BASE_DIR ); - } - if ( !indexDir.canWrite() ) { - throw new HibernateException( "Cannot write into index directory: " + Environment.INDEX_BASE_DIR ); - } - log.info( "Setting index dir to " + indexDir ); - Iterator iter = cfg.getClassMappings(); + DirectoryProviderFactory factory = new DirectoryProviderFactory(); while ( iter.hasNext() ) { PersistentClass clazz = (PersistentClass) iter.next(); Class<?> mappedClass = clazz.getMappedClass(); if ( mappedClass != null ) { if ( mappedClass.isAnnotationPresent( Indexed.class ) ) { - Indexed indexed = mappedClass.getAnnotation( Indexed.class ); - String fileName = getTypeName( mappedClass, indexed.index() ); - File file = new File( indexDir, fileName ); - Directory directory; - try { - boolean create = !file.exists(); - directory = FSDirectory.getDirectory( file.getCanonicalPath(), create ); - if (create) { - IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create ); - iw.close(); - } + DirectoryProvider provider = factory.createDirectoryProvider( mappedClass, cfg ); + final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>( + mappedClass, analyzer, provider + ); + if ( ! indexLock.containsKey( provider ) ) { + indexLock.put( provider, new ReentrantLock() ); } - catch (IOException ie) { - throw new HibernateException("Unable to initialize index: " + indexed.index(), ie ); - } - final DocumentBuilder<Object> documentBuilder = new DocumentBuilder<Object>( mappedClass, analyzer, directory ); - if ( ! 
indexLock.containsKey( directory ) ) { - indexLock.put( directory, new ReentrantLock() ); - } documentBuilders.put( mappedClass, documentBuilder ); -// try { -// IndexWriter iw = new IndexWriter( documentBuilder.getFile(), new StopAnalyzer(), true ); -// iw.close(); -// } -// catch (IOException ioe) { -// throw new HibernateException(ioe); -// } - log.info( "index file: " + file.getAbsolutePath() ); } } } initialized = true; } - private static String getTypeName(Class clazz, String name) { - return "".equals( name ) ? clazz.getName() : name; - } - public void onPostDelete(PostDeleteEvent event) { DocumentBuilder builder = documentBuilders.get( event.getEntity().getClass() ); if ( builder != null ) { @@ -166,12 +129,12 @@ private void remove(DocumentBuilder<?> builder, Serializable id) { Term term = builder.getTerm( id ); log.debug( "removing: " + term ); - Directory directory = builder.getDirectory(); - Lock lock = indexLock.get( directory ); + DirectoryProvider directoryProvider = builder.getDirectoryProvider(); + Lock lock = indexLock.get( directoryProvider ); lock.lock(); try { - IndexReader reader = IndexReader.open( directory ); + IndexReader reader = IndexReader.open( directoryProvider.getDirectory() ); reader.deleteDocuments( term ); reader.close(); } @@ -185,14 +148,16 @@ private void add(final Object entity, final DocumentBuilder<Object> builder, final Serializable id) { Document doc = builder.getDocument( entity, id ); - if( log.isDebugEnabled() ) { + if ( log.isDebugEnabled() ) { log.debug( "adding: " + doc ); } - Directory directory = builder.getDirectory(); - Lock lock = indexLock.get( directory ); + DirectoryProvider directoryProvider = builder.getDirectoryProvider(); + Lock lock = indexLock.get( directoryProvider ); lock.lock(); try { - IndexWriter writer = new IndexWriter( directory, builder.getAnalyzer(), false); //have been created at init time + IndexWriter writer = new IndexWriter( + directoryProvider.getDirectory(), builder.getAnalyzer(), false + ); //have been created at init time writer.addDocument( doc ); writer.close(); } Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-01 12:50:34 UTC (rev 10075) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProvider.java 2006-07-01 15:11:45 UTC (rev 10076) @@ -0,0 +1,23 @@ +//$Id: $ +package org.hibernate.lucene.store; + +import java.util.Properties; + +import org.hibernate.cfg.Configuration; +import org.apache.lucene.store.Directory; + + +/** + * Set up and provide a Lucene <code>Directory</code> + * <code>equals()</code> and <code>hashCode()</code> must guaranty to + * return true for a provider pointing to the same underlying Lucene Store + * This class must be thread safe regarding <code>getDirectory()</code> + * @author Emmanuel Bernard + */ +public interface DirectoryProvider<TDirectory extends Directory> { + /** get the information toi initialize the directory and build its hashCode */ + void initialize(Class<?> entity, Configuration cfg, Properties properties); + + TDirectory getDirectory(); +} + Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-01 12:50:34 UTC 
(rev 10075) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/DirectoryProviderFactory.java 2006-07-01 15:11:45 UTC (rev 10076) @@ -0,0 +1,86 @@ +//$Id: $ +package org.hibernate.lucene.store; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.hibernate.HibernateException; +import org.hibernate.cfg.Configuration; +import org.hibernate.lucene.Indexed; +import org.hibernate.util.ReflectHelper; +import org.hibernate.util.StringHelper; + +/** + * Create a Lucene directory provider + * <p/> + * Lucene directory providers are configured through... + * hibernate.lucene.default.impl=FSDirectory + * hibernate.lucene.toto.impl=FSDirectory + * hibernate.lucene.toto.*= + * + * @author Emmanuel Bernard + */ +public class DirectoryProviderFactory { + public List<DirectoryProvider> providers = new ArrayList<DirectoryProvider>(); + private static String LUCENE_PREFIX = "hibernate.lucene."; + private static String LUCENE_DEFAULT = LUCENE_PREFIX + "default."; + private static String DEFAULT_DIRECTORY_PROVIDER = FSDirectoryProvider.class.getName(); + + + + public DirectoryProvider createDirectoryProvider(Class<?> entity, Configuration cfg) { + //get properties + Properties indexProps = getDirectoryProperties( cfg, entity ); + + //set up the directory + String className = indexProps.getProperty( "impl" ); + if ( StringHelper.isEmpty( className ) ) { + className = DEFAULT_DIRECTORY_PROVIDER; + } + DirectoryProvider provider = null; + try { + Class<DirectoryProvider> directoryClass = ReflectHelper.classForName( + className, DirectoryProviderFactory.class + ); + provider = directoryClass.newInstance(); + } + catch (Exception e) { + throw new HibernateException( "Unable to instanciate directory provider: " + className ); + } + provider.initialize( entity, cfg, indexProps ); + int index = providers.indexOf( provider ); + if ( index != -1 ) { + //share the same Directory provider for the same underlying store + return providers.get( index ); + } + else { + providers.add( provider ); + return provider; + } + } + + private static Properties getDirectoryProperties(Configuration cfg, Class<?> entity) { + Properties props = cfg.getProperties(); + String indexName = LUCENE_PREFIX + getTypeName( entity ); + Properties indexProps = new Properties(); + Properties indexSpecificProps = new Properties(); + for ( Map.Entry<String, ?> entry : ( (Map<String, ?>) props ).entrySet() ) { + String key = entry.getKey(); + if ( key.startsWith( LUCENE_DEFAULT ) ) { + indexProps.setProperty( key.substring( LUCENE_DEFAULT.length() ), (String) entry.getValue() ); + } + else if ( key.startsWith( indexName ) ) { + indexSpecificProps.setProperty( key.substring( indexName.length() ), (String) entry.getValue() ); + } + } + indexProps.putAll( indexSpecificProps ); + return indexProps; + } + + public static String getTypeName(Class<?> clazz) { + String name = clazz.getAnnotation(Indexed.class).index(); + return "".equals( name ) ? 
clazz.getName() : name; + } +} Added: trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-01 12:50:34 UTC (rev 10075) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/lucene/store/FSDirectoryProvider.java 2006-07-01 15:11:45 UTC (rev 10076) @@ -0,0 +1,61 @@ +//$Id: $ +package org.hibernate.lucene.store; + +import java.io.File; +import java.io.IOException; +import java.text.MessageFormat; +import java.util.Properties; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.hibernate.HibernateException; +import org.hibernate.cfg.Configuration; + +/** + * Use a Lucene FSDirectory + * The base directory is represented by hibernate.lucene.<index>.indexBase + * The index is created in <base directory>/<index name> + * @author Emmanuel Bernard + */ +public class FSDirectoryProvider implements DirectoryProvider { + private FSDirectory directory; + private static Log log = LogFactory.getLog( FSDirectoryProvider.class ); + + public void initialize(Class entity, Configuration cfg, Properties properties) { + String indexBase = properties.getProperty( "indexBase", "." ); + File indexDir = new File( indexBase ); + + if ( !( indexDir.exists() && indexDir.isDirectory() ) ) { + //TODO create the directory? + throw new HibernateException( MessageFormat.format( "Index directory does not exists: {0}", indexBase ) ); + } + if ( !indexDir.canWrite() ) { + throw new HibernateException( "Cannot write into index directory: " + indexBase ); + } + log.info( "Setting index dir to " + indexDir ); + + String fileName = DirectoryProviderFactory.getTypeName( entity ); + File file = new File( indexDir, fileName ); + + try { + boolean create = !file.exists(); + directory = FSDirectory.getDirectory( file.getCanonicalPath(), create ); + if (create) { + IndexWriter iw = new IndexWriter(directory, new StandardAnalyzer(), create ); + iw.close(); + } + } + catch (IOException e) { + throw new HibernateException( "Unable to initialize index: " + fileName, e ); + } + + } + + public Directory getDirectory() { + return directory; + } +} |
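To tie the pieces together, a hypothetical mapped entity: the name given in `@Indexed.index()` (or, when left empty, the fully-qualified class name) is what `DirectoryProviderFactory.getTypeName()` returns, and therefore what the `hibernate.lucene.<indexname>.*` properties and the on-disk index directory are keyed on. Field-level Lucene mapping annotations are omitted from this sketch.

```java
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.lucene.Indexed;

// Hypothetical entity: because index() is "Book", its directory provider is
// resolved from hibernate.lucene.Book.* (falling back to hibernate.lucene.default.*),
// and the FSDirectory provider would create the index under <indexBase>/Book.
@Entity
@Indexed(index = "Book")
public class Book {

	@Id
	private Integer id;

	private String title;

	public Integer getId() { return id; }
	public void setId(Integer id) { this.id = id; }

	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }
}
```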
From: <hib...@li...> - 2006-07-01 12:50:38
|
Author: epbernard Date: 2006-07-01 08:50:34 -0400 (Sat, 01 Jul 2006) New Revision: 10075 Modified: trunk/Hibernate3/src/org/hibernate/connection/DatasourceConnectionProvider.java Log: ease use of DatasourceConnectionProvider (InjectedDSCP needs that, as well as the Lucene JDBCDirectory) Modified: trunk/Hibernate3/src/org/hibernate/connection/DatasourceConnectionProvider.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/connection/DatasourceConnectionProvider.java 2006-06-30 23:34:54 UTC (rev 10074) +++ trunk/Hibernate3/src/org/hibernate/connection/DatasourceConnectionProvider.java 2006-07-01 12:50:34 UTC (rev 10075) @@ -28,11 +28,11 @@ private static final Log log = LogFactory.getLog(DatasourceConnectionProvider.class); - protected DataSource getDataSource() { + public DataSource getDataSource() { return ds; } - protected void setDataSource(DataSource ds) { + public void setDataSource(DataSource ds) { this.ds = ds; } |
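A sketch of the kind of wiring this change enables (the surrounding class is hypothetical): with the accessors now public, integration code that already holds a `DataSource` — an injected one, or the Lucene JDBCDirectory setup mentioned in the log — can hand it to the provider directly instead of relying on the provider's own JNDI-based `configure()`.

```java
import java.sql.Connection;
import java.sql.SQLException;

import javax.sql.DataSource;

import org.hibernate.connection.DatasourceConnectionProvider;

public class InjectedDataSourceExample { // hypothetical

	public static Connection open(DataSource ds) throws SQLException {
		DatasourceConnectionProvider provider = new DatasourceConnectionProvider();
		provider.setDataSource( ds ); // now callable from outside the package
		return provider.getConnection();
	}
}
```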
From: <hib...@li...> - 2006-06-30 23:35:01
|
Author: epbernard Date: 2006-06-30 19:34:54 -0400 (Fri, 30 Jun 2006) New Revision: 10074 Added: trunk/Hibernate3/test/org/hibernate/test/sql/Speech.java Modified: trunk/Hibernate3/test/org/hibernate/test/sql/General.hbm.xml trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java Log: HHH-1871 error when mixing entities and scalar Modified: trunk/Hibernate3/test/org/hibernate/test/sql/General.hbm.xml =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/sql/General.hbm.xml 2006-06-30 06:12:36 UTC (rev 10073) +++ trunk/Hibernate3/test/org/hibernate/test/sql/General.hbm.xml 2006-06-30 23:34:54 UTC (rev 10074) @@ -99,6 +99,14 @@ </component> </class> + <class name="Speech"> + <id name="id" type="integer"> + <generator class="increment"/> + </id> + <property name="name" column="name"/> + <property name="length" column="flength"/> + </class> + <resultset name="org-emp-regionCode"> <return-scalar column="regionCode" type="string"/> <return alias="org" class="Organization"/> @@ -130,6 +138,15 @@ <return-scalar column="volume"/> </resultset> + <resultset name="speech"> + <return alias="sp" class="Speech"> + <return-property name="id" column="id"/> + <return-property name="name" column="name"/> + <return-property name="length" column="flength"/> + </return> + <return-scalar column="scalarName"/> + </resultset> + <sql-query name="spaceship" resultset-ref="spaceship-vol"> select id as id, fld_name as name, Modified: trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java 2006-06-30 06:12:36 UTC (rev 10073) +++ trunk/Hibernate3/test/org/hibernate/test/sql/GeneralTest.java 2006-06-30 23:34:54 UTC (rev 10074) @@ -542,6 +542,25 @@ } + public void testMixAndMatchEntityScalar() { + Session s = openSession(); + Transaction t = s.beginTransaction(); + Speech speech = new Speech(); + speech.setLength( 23d ); + speech.setName( "Mine" ); + s.persist( speech ); + s.flush(); + s.clear(); + + List l = s.createSQLQuery( "select name, id, flength, name as scalarName from Speech" ) + .setResultSetMapping( "speech" ) + .list(); + assertEquals( l.size(), 1 ); + + t.rollback(); + s.close(); + } + private double extractDoubleValue(Object value) { if ( value instanceof BigInteger ) { return ( ( BigInteger ) value ).doubleValue(); Added: trunk/Hibernate3/test/org/hibernate/test/sql/Speech.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/sql/Speech.java 2006-06-30 06:12:36 UTC (rev 10073) +++ trunk/Hibernate3/test/org/hibernate/test/sql/Speech.java 2006-06-30 23:34:54 UTC (rev 10074) @@ -0,0 +1,35 @@ +//$Id: $ +package org.hibernate.test.sql; + +/** + * @author Emmanuel Bernard + */ +public class Speech { + private Integer id; + private String name; + private Double length; + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public Double getLength() { + return length; + } + + public void setLength(Double length) { + this.length = length; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} |
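For reference, consuming the new mapping from application code might look like the sketch below (the wrapper class is hypothetical, session handling is omitted); the row layout — entity first, then the scalar — follows the declaration order of the `<resultset name="speech">` mapping above:

```java
import java.util.List;

import org.hibernate.Session;
import org.hibernate.test.sql.Speech;

public class SpeechQueryExample { // hypothetical

	// Each row is an Object[]: the Speech entity followed by the scalar
	// column aliased "scalarName", matching the mapping's declaration order.
	public static void printSpeeches(Session session) {
		List rows = session.createSQLQuery(
				"select name, id, flength, name as scalarName from Speech" )
				.setResultSetMapping( "speech" )
				.list();
		for ( int i = 0; i < rows.size(); i++ ) {
			Object[] row = ( Object[] ) rows.get( i );
			Speech speech = ( Speech ) row[0];
			String scalarName = ( String ) row[1];
			System.out.println( scalarName + " -> " + speech.getLength() );
		}
	}
}
```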
From: gdilem@libero.it <gd...@li...> - 2006-06-30 13:24:24
|
Hi all, I do a saveOrUpdate on my instance. After I've done it I try to get the id of the same instance that was passed to the saveOrUpdate method, but I get an id that is the real id minus one: the entry in the database has an id one greater than the one I see in my log. Any help? Please help, I'm going crazy |
From: <hib...@li...> - 2006-06-30 06:13:09
|
Author: ste...@jb... Date: 2006-06-30 02:12:36 -0400 (Fri, 30 Jun 2006) New Revision: 10073 Modified: branches/HQL_ANTLR_2/Hibernate3/g2/parse.g branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g Log: added PROP_FETCH to remove some non-determinism Modified: branches/HQL_ANTLR_2/Hibernate3/g2/parse.g =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/g2/parse.g 2006-06-30 06:07:52 UTC (rev 10072) +++ branches/HQL_ANTLR_2/Hibernate3/g2/parse.g 2006-06-30 06:12:36 UTC (rev 10073) @@ -125,6 +125,7 @@ ENTITY_NAME; COLLECTION_ROLE; CLASS_NAME; + PROP_FETCH; // Literal tokens. CONSTANT; @@ -339,7 +340,9 @@ ; propertyFetch - : FETCH ALL! PROPERTIES! + : FETCH ALL! PROPERTIES! { + #propertyFetch = #( [PROP_FETCH, "prop-fetch"] ); + } ; Modified: branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g 2006-06-30 06:07:52 UTC (rev 10072) +++ branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g 2006-06-30 06:12:36 UTC (rev 10073) @@ -143,7 +143,7 @@ ; entityPersisterReference! - : en:ENTITY_NAME (a:ALIAS)? (pf:FETCH)? { + : en:ENTITY_NAME (a:ALIAS)? (pf:PROP_FETCH)? { #entityPersisterReference = buildEntityPersisterReference( en, a, pf ); } ; @@ -156,7 +156,7 @@ : e:entityPersisterReference (on:ON)? { handleAdHocJoinNode( #e, joinType, on ); } - | (f:FETCH)? (a:ALIAS)? (pf:FETCH)? { pushExplicitJoinContext( joinType, #f, #a, #pf ); } prop:propertyPath (with:WITH)? { + | (f:FETCH)? (a:ALIAS)? (pf:PROP_FETCH)? { pushExplicitJoinContext( joinType, #f, #a, #pf ); } prop:propertyPath (with:WITH)? { popExplicitJoinContext(); } ; |
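The `propertyFetch` rule being reworked here corresponds to HQL's override for lazy properties; roughly, a query such as the following (entity, property, and helper names hypothetical) is what produces the new `PROP_FETCH` node:

```java
import java.util.List;

import org.hibernate.Session;

public class PropertyFetchExample { // hypothetical

	// "fetch all properties" forces lazy properties of the returned instances
	// to be initialized; the parser represents the clause as a PROP_FETCH node.
	public static List findDocuments(Session session, String pattern) {
		return session.createQuery(
				"from Document d fetch all properties where d.name like :pattern" )
				.setString( "pattern", pattern )
				.list();
	}
}
```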
From: <hib...@li...> - 2006-06-30 06:08:12
|
Author: ste...@jb... Date: 2006-06-30 02:07:52 -0400 (Fri, 30 Jun 2006) New Revision: 10072 Modified: branches/HQL_ANTLR_2/Hibernate3/build.xml Log: redid parse phase and simplified portions of resolve phase Modified: branches/HQL_ANTLR_2/Hibernate3/build.xml =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/build.xml 2006-06-30 05:59:31 UTC (rev 10071) +++ branches/HQL_ANTLR_2/Hibernate3/build.xml 2006-06-30 06:07:52 UTC (rev 10072) @@ -194,7 +194,13 @@ target="${grammar.dir}/sql-gen.g" outputdirectory="${parser.src}" /> - <touch file="${parser.src}/.antlr_run"/> + + <!-- The HQL parsing grammar --> + <antlrtask target="g2/parse.g" outputdirectory="${parser.src}" /> + <!-- The HQL resolver grammar --> + <antlrtask target="g2/resolve.g" outputdirectory="${parser.src}" /> + + <touch file="${parser.src}/.antlr_run"/> </target> <target name="cleanantlr" depends="init" |
From: <hib...@li...> - 2006-06-30 05:59:49
|
Author: ste...@jb... Date: 2006-06-30 01:59:31 -0400 (Fri, 30 Jun 2006) New Revision: 10071 Added: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ParserTest.java branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ResolverTest.java Removed: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/HqlResolverTest.java Log: redid parse phase and simplified portions of resolve phase Deleted: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/HqlResolverTest.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/HqlResolverTest.java 2006-06-30 05:55:55 UTC (rev 10070) +++ branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/HqlResolverTest.java 2006-06-30 05:59:31 UTC (rev 10071) @@ -1,191 +0,0 @@ -package org.hibernate.test.hql; - -import antlr.RecognitionException; -import antlr.TokenStreamException; -import antlr.collections.AST; -import junit.framework.Test; -import junit.framework.TestSuite; -import org.hibernate.hql.antlr.HqlRTokenTypes; -import org.hibernate.hql.ast.resolve.HqlResolver; -import org.hibernate.hql.ast.resolve.StatementNode; -import org.hibernate.hql.ast.resolve.SelectStatementNode; -import org.hibernate.hql.ast.util.ASTPrinter; -import org.hibernate.hql.ast.util.NodeTraverser; -import org.hibernate.test.TestCase; -import org.hibernate.engine.SessionFactoryImplementor; - -/** - * Tests the new HQL resolver phase. - * <br>User: Joshua Davis - * Date: Apr 1, 2006 - * Time: 7:25:37 AM - */ -public class HqlResolverTest extends TestCase { - private static final ASTPrinter hqlrPrinter = generatePrinter(); - - private static ASTPrinter generatePrinter() { - ASTPrinter rtn = new ASTPrinter( HqlRTokenTypes.class ); - rtn.setShowClassNames( false ); - return rtn; - } - - public HqlResolverTest(String n) { - super( n ); - } - - public void testSimpleHql() throws Exception { - // First, get an AST by parsing some HQL text. - AST ast = resolve( "from Animal" ); - // Assert: - // The root node should be a statement. - assertTrue( ast instanceof SelectStatementNode ); - } - - public void testSelectExpression() throws Throwable { - resolve( "select a from Animal a" ); - resolve( "select a.mother as m from Animal as a" ); - } - - public void testExplicitImplicitJoin() throws Exception { - AST ast = resolve( "from Animal a left join fetch a.mother.mother.mother as ggm where ggm.name like '%weeble%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 3, ast ); - } - - public void testExplicitCollectionJoin() throws Throwable { - AST ast = resolve( "from Animal as a inner join a.offspring as o where o.name like '%boots%'" ); - } - - public void testSimpleImplicitJoin() throws Exception { - AST ast = resolve( "from Animal a where a.mother.name like '%mary%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 1, ast ); - - ast = resolve( "from Animal a where a.mother.mother.name like '%weeble%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 2, ast ); - - ast = resolve( "from Animal a where a.mother.mother = ?" 
); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 1, ast ); - } - - public void testUnqualifiedPropertyReference() throws Exception { - AST ast = resolve( "from Animal where name like '%mary%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 0, ast ); - - ast = resolve( "from Animal where mother.name like '%mary%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 1, ast ); - } - - public void testThetaJoins() throws Exception { - AST ast = resolve( "from Animal a, Animal b where a.mother.id = b.id and b.name like '%mary%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 1, ast ); - - ast = resolve( "from Animal a, Animal b inner join b.mother as c where a.mother.id = b.id and b.name like '%mary%'" ); - assertTrue( ast instanceof SelectStatementNode ); - JoinCounter.assertJoinCount( 2, ast ); - } - - public void testIndexOperation() throws Throwable { - AST ast = null; - - // todo : these are all goofed up... - ast = resolve( "from Zoo zoo where zoo.mammals['dog'].father.description like '%black%'" ); - assertTrue( ast instanceof StatementNode ); - - ast = resolve( "from Zoo zoo join zoo.animals an where zoo.mammals[ index(an) ] = an" ); - assertTrue( ast instanceof StatementNode ); - } - - //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - private AST resolve(String hql) throws RecognitionException, TokenStreamException { - AST hqlAst = HqlParserTest.doParse(hql,false); - // Now, pass it though the resolver phase, which yeilds - // a processed HQL AST. - HqlResolver resolver = new HqlResolver( getSessionFactoryImplementor() ); - resolver.statement( hqlAst ); - AST resolvedHql = resolver.getAST(); - System.out.println( - hqlrPrinter.showAsString( resolvedHql, "Resolved AST : " + resolvedHql.toStringTree() + "" ) - ); - return resolvedHql; - } - - protected SessionFactoryImplementor getSessionFactoryImplementor() { - SessionFactoryImplementor factory = ( SessionFactoryImplementor ) getSessions(); - if ( factory == null ) { - throw new NullPointerException( "Unable to create factory!" 
); - } - return factory; - } - - private static class JoinCounter implements NodeTraverser.VisitationStrategy { - int count = 0; - public void visit(AST node) { - if ( node.getType() == HqlRTokenTypes.JOIN ) { - count++; - } - } - public static void assertJoinCount(int expected, AST tree) { - assertJoinCount( "incorrect join count", expected, tree ); - } - public static void assertJoinCount(String failMessage, int expected, AST tree) { - JoinCounter counter = new JoinCounter(); - NodeTraverser walker = new NodeTraverser( counter ); - walker.traverseDepthFirst( tree ); - assertEquals( failMessage, expected, counter.count ); - } - } - - public static Test suite() { - return new TestSuite(HqlResolverTest.class); - } - - protected String[] getMappings() { - return new String[]{ - "hql/Animal.hbm.xml", - "hql/EntityWithCrazyCompositeKey.hbm.xml", - "batchfetch/ProductLine.hbm.xml", - "cid/Customer.hbm.xml", - "cid/Order.hbm.xml", - "cid/LineItem.hbm.xml", - "cid/Product.hbm.xml", - "legacy/Baz.hbm.xml", - "legacy/Category.hbm.xml", - "legacy/Commento.hbm.xml", - "legacy/Container.hbm.xml", - "legacy/Custom.hbm.xml", - "legacy/Eye.hbm.xml", - "legacy/Fee.hbm.xml", - "legacy/FooBar.hbm.xml", - "legacy/Fum.hbm.xml", - "legacy/Glarch.hbm.xml", - "legacy/Holder.hbm.xml", - "legacy/Many.hbm.xml", - "legacy/Marelo.hbm.xml", - "legacy/MasterDetail.hbm.xml", - "legacy/Middle.hbm.xml", - "legacy/Multi.hbm.xml", - "legacy/Nameable.hbm.xml", - "legacy/One.hbm.xml", - "legacy/Qux.hbm.xml", - "legacy/Simple.hbm.xml", - "legacy/SingleSeveral.hbm.xml", - "legacy/WZ.hbm.xml", - "legacy/UpDown.hbm.xml", - "compositeelement/Parent.hbm.xml", - "onetoone/joined/Person.hbm.xml", - "hql/CrazyIdFieldNames.hbm.xml" - }; - } - - protected boolean recreateSchema() { - // we do not need to create the schema for these parser tests - return false; - } -} Added: branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ParserTest.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ParserTest.java 2006-06-30 05:55:55 UTC (rev 10070) +++ branches/HQL_ANTLR_2/Hibernate3/test/org/hibernate/test/hql/redesign/ParserTest.java 2006-06-30 05:59:31 UTC (rev 10071) @@ -0,0 +1,1153 @@ +package org.hibernate.test.hql.redesign; + +import junit.framework.TestCase; +import org.hibernate.hql.antlr.ParseTokenTypes; +import org.hibernate.hql.ast.util.ASTPrinter; +import org.hibernate.hql.ast.util.ASTIterator; +import org.hibernate.hql.ast.parse.HqlParser; +import org.hibernate.hql.ast.tree.Node; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import antlr.collections.AST; +import antlr.RecognitionException; +import antlr.TokenStreamException; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; + + +/** + * todo: describe ParserTest + * + * @author Steve Ebersole + */ +public class ParserTest extends TestCase implements ParseTokenTypes { + + public static final Log log = LogFactory.getLog( ParserTest.class ); + + public static final String CONSTANT = "constant-value"; + + public void testEntityReferenceRule() throws Throwable { + String entityName = "com.acme.XYZ"; + HqlParser parser = buildHqlParser( entityName ); + parser.fromClassOrOuterQueryPath(); + AST result = parser.getAST().getFirstChild(); + show( result, "entity-name-result" ); + assertEquals( ENTITY_NAME, result.getType() ); + assertEquals( entityName, result.getText() ); + + entityName = "XYZ"; + parser = 
buildHqlParser( entityName ); + parser.fromClassOrOuterQueryPath(); + result = parser.getAST().getFirstChild(); + show( result, "entity-name-result" ); + assertEquals( ENTITY_NAME, result.getType() ); + assertEquals( entityName, result.getText() ); + } + + public void testSimpleFrom() throws Throwable { + String entityName = "com.acme.XYZ"; + HqlParser parser = buildHqlParser( "from " + entityName + " as e" ); + parser.statement(); + AST result = parser.getAST(); + show( result, "simple-from-result" ); + } + + public void testConstantUsage() throws Throwable { + HqlParser parser = buildHqlParser( "from com.acme.XYZ where prop = org.hibernate.test.hql.redesign.ParserTest.CONSTANT" ); + parser.statement(); + AST result = parser.getAST(); + show( result, "constant" ); + AST constantNode = result.getFirstChild().getNextSibling().getFirstChild().getFirstChild().getNextSibling(); + assertEquals( JAVA_CONSTANT, constantNode.getType() ); + + // apply control checks + parser = buildHqlParser( "from com.acme.XYZ where prop = compProp.subProp" ); + parser.statement(); + result = parser.getAST(); + show( result, "constant-control" ); + AST checkNode = result.getFirstChild().getNextSibling().getFirstChild().getFirstChild().getNextSibling(); + assertEquals( DOT, checkNode.getType() ); + + } + + public void testEntityNamePathWithKeyword() throws Exception { + parse( "from org.hibernate.test.Inner" ); + } + + public void testWhereClauseIdentPrimaryWithEmbeddedKeyword() throws Exception { + parse( "from org.hibernate.test.Inner i where i.outer.inner.middle = 'xyz'" ); + } + + public void testDynamicInstantiation() throws Exception { + parse( "select new list(a, mate) from Animal a join a.mate as mate" ); + parse( "select new Family(mother, mate, offspr) from eg.DomesticCat as mother join mother.mate as mate left join mother.kittens as offspr" ); + } + + public void testListOrMapKeywordReference() throws Exception { + parse( "select p from eg.NameList nl, eg.Person p where p.name = some elements(nl.names)" ); + parse( "select p from eg.NameList list, eg.Person p where p.name = some elements(list.names)" ); + parse( "select p from eg.NameList map, eg.Person p where p.name = some elements(map.names)" ); + } + + public void testExplicitPropertyJoin() throws Exception { + parse( "from eg.Cat as cat inner join fetch cat.mate as m fetch all properties left join fetch cat.kittens as k" ); + } + + private void show(AST node, String header) { + ASTPrinter printer = new ASTPrinter( ParseTokenTypes.class ); + printer.setShowClassNames( false ); + log.info( printer.showAsString( node, header ) ); + } + + // tests copied over from org.hibernate.test.hql.HqlParserTest ~~~~~~~~~~~~ + + public void testUnion() throws Exception { + parse("from Animal a where a in (from Cat union from Dog) "); + } + + /** + * Section 9.2 - from * + */ + public void testDocoExamples92() throws Exception { + parse( "from eg.Cat" ); + parse( "from eg.Cat as cat" ); + parse( "from eg.Cat cat" ); + parse( "from Formula, Parameter" ); + parse( "from Formula as form, Parameter as param" ); + } + + /** + * Section 9.3 - Associations and joins * + */ + public void testDocoExamples93() throws Exception { + parse( "from eg.Cat as cat inner join cat.mate as mate left outer join cat.kittens as kitten" ); + parse( "from eg.Cat as cat left join cat.mate.kittens as kittens" ); + parse( "from Formula form full join form.parameter param" ); + parse( "from eg.Cat as cat join cat.mate as mate left join cat.kittens as kitten" ); + parse( "from eg.Cat as 
cat\ninner join fetch cat.mate\nleft join fetch cat.kittens" ); + } + + /** + * Section 9.4 - Select * + */ + public void testDocoExamples94() throws Exception { + parse( "select mate from eg.Cat as cat inner join cat.mate as mate" ); + parse( "select cat.mate from eg.Cat cat" ); + parse( "select elements(cat.kittens) from eg.Cat cat" ); + parse( "select cat.name from eg.DomesticCat cat where cat.name like 'fri%'" ); + parse( "select cust.name.firstName from Customer as cust" ); + parse( "select mother, offspr, mate.name from eg.DomesticCat\n" + + " as mother inner join mother.mate as mate left outer join\n" + + "mother.kittens as offspr" ); + parse( "select new Family(mother, mate, offspr)\n" + + "from eg.DomesticCat as mother\n" + + "join mother.mate as mate\n" + + "left join mother.kittens as offspr\n" ); + } + + /** + * Section 9.5 - Aggregate functions * + */ + public void testDocoExamples95() throws Exception { + parse( "select avg(cat.weight), sum(cat.weight), max(cat.weight), count(cat)\n" + + "from eg.Cat cat" ); + parse( "select cat, count( elements(cat.kittens) )\n" + + " from eg.Cat cat group by cat" ); + parse( "select distinct cat.name from eg.Cat cat" ); + parse( "select count(distinct cat.name), count(cat) from eg.Cat cat" ); + } + + /** + * Section 9.6 - Polymorphism * + */ + public void testDocoExamples96() throws Exception { + parse( "from eg.Cat as cat" ); + parse( "from java.lang.Object o" ); + parse( "from eg.Named n, eg.Named m where n.name = m.name" ); + } + + /** + * Section 9.7 - Where * + */ + public void testDocoExamples97() throws Exception { + parse( "from eg.Cat as cat where cat.name='Fritz'" ); + parse( "select foo\n" + + "from eg.Foo foo, eg.Bar bar\n" + + "where foo.startDate = bar.date\n" ); + parse( "from eg.Cat cat where cat.mate.name is not null" ); + parse( "from eg.Cat cat, eg.Cat rival where cat.mate = rival.mate" ); + parse( "select cat, mate\n" + + "from eg.Cat cat, eg.Cat mate\n" + + "where cat.mate = mate" ); + parse( "from eg.Cat as cat where cat.id = 123" ); + parse( "from eg.Cat as cat where cat.mate.id = 69" ); + parse( "from bank.Person person\n" + + "where person.id.country = 'AU'\n" + + "and person.id.medicareNumber = 123456" ); + parse( "from bank.Account account\n" + + "where account.owner.id.country = 'AU'\n" + + "and account.owner.id.medicareNumber = 123456" ); + parse( "from eg.Cat cat where cat.class = eg.DomesticCat" ); + parse( "from eg.AuditLog log, eg.Payment payment\n" + + "where log.item.class = 'eg.Payment' and log.item.id = payment.id" ); + } + + /** + * Section 9.8 - Expressions * + */ + public void testDocoExamples98() throws Exception { + parse( "from eg.DomesticCat cat where cat.name between 'A' and 'B'" ); + parse( "from eg.DomesticCat cat where cat.name in ( 'Foo', 'Bar', 'Baz' )" ); + parse( "from eg.DomesticCat cat where cat.name not between 'A' and 'B'" ); + parse( "from eg.DomesticCat cat where cat.name not in ( 'Foo', 'Bar', 'Baz' )" ); + parse( "from eg.Cat cat where cat.kittens.size > 0" ); + parse( "from eg.Cat cat where size(cat.kittens) > 0" ); +// This is a little odd. I'm not sure whether 'current' is a keyword. +// parse("from Calendar cal where cal.holidays.maxElement > current date"); +// Using the token 'order' as both a keyword and an identifier works now, but +// the second instance causes some problems because order is valid in the second instance. 
+// parse("from Order order where maxindex(order.items) > 100"); +// parse("from Order order where minelement(order.items) > 10000"); + parse( "from Order ord where maxindex(ord.items) > 100" ); + parse( "from Order ord where minelement(ord.items) > 10000" ); + + parse( "select mother from eg.Cat as mother, eg.Cat as kit\n" + + "where kit in elements(foo.kittens)" ); + parse( "select p from eg.NameList list, eg.Person p\n" + + "where p.name = some elements(list.names)" ); + parse( "from eg.Cat cat where exists elements(cat.kittens)" ); + parse( "from eg.Player p where 3 > all elements(p.scores)" ); + parse( "from eg.Show show where 'fizard' in indices(show.acts)" ); + + // Yet another example of the pathological 'order' token. +// parse("from Order order where order.items[0].id = 1234"); +// parse("select person from Person person, Calendar calendar\n" +// + "where calendar.holidays['national day'] = person.birthDay\n" +// + "and person.nationality.calendar = calendar"); +// parse("select item from Item item, Order order\n" +// + "where order.items[ order.deliveredItemIndices[0] ] = item and order.id = 11"); +// parse("select item from Item item, Order order\n" +// + "where order.items[ maxindex(order.items) ] = item and order.id = 11"); + + parse( "from Order ord where ord.items[0].id = 1234" ); + parse( "select person from Person person, Calendar calendar\n" + + "where calendar.holidays['national day'] = person.birthDay\n" + + "and person.nationality.calendar = calendar" ); + parse( "select item from Item item, Order ord\n" + + "where ord.items[ ord.deliveredItemIndices[0] ] = item and ord.id = 11" ); + parse( "select item from Item item, Order ord\n" + + "where ord.items[ maxindex(ord.items) ] = item and ord.id = 11" ); + + parse( "select item from Item item, Order ord\n" + + "where ord.items[ size(ord.items) - 1 ] = item" ); + + parse( "from eg.DomesticCat cat where upper(cat.name) like 'FRI%'" ); + + parse( "select cust from Product prod, Store store\n" + + "inner join store.customers cust\n" + + "where prod.name = 'widget'\n" + + "and store.location.name in ( 'Melbourne', 'Sydney' )\n" + + "and prod = all elements(cust.currentOrder.lineItems)" ); + + } + + public void testDocoExamples99() throws Exception { + parse( "from eg.DomesticCat cat\n" + + "order by cat.name asc, cat.weight desc, cat.birthdate" ); + } + + public void testDocoExamples910() throws Exception { + parse( "select cat.color, sum(cat.weight), count(cat)\n" + + "from eg.Cat cat group by cat.color" ); + parse( "select foo.id, avg( elements(foo.names) ), max( indices(foo.names) )\n" + + "from eg.Foo foo group by foo.id" ); + parse( "select cat.color, sum(cat.weight), count(cat)\n" + + "from eg.Cat cat group by cat.color\n" + + "having cat.color in (eg.Color.TABBY, eg.Color.BLACK)" ); + parse( "select cat from eg.Cat cat join cat.kittens kitten\n" + + "group by cat having avg(kitten.weight) > 100\n" + + "order by count(kitten) asc, sum(kitten.weight) desc" ); + } + + public void testDocoExamples911() throws Exception { + parse( "from eg.Cat as fatcat where fatcat.weight > (\n" + + "select avg(cat.weight) from eg.DomesticCat cat)" ); + parse( "from eg.DomesticCat as cat where cat.name = some (\n" + + "select name.nickName from eg.Name as name)\n" ); + parse( "from eg.Cat as cat where not exists (\n" + + "from eg.Cat as mate where mate.mate = cat)" ); + parse( "from eg.DomesticCat as cat where cat.name not in (\n" + + "select name.nickName from eg.Name as name)" ); + } + + public void testDocoExamples912() throws Exception 
{ + parse( "select ord.id, sum(price.amount), count(item)\n" + + "from Order as ord join ord.lineItems as item\n" + + "join item.product as product, Catalog as catalog\n" + + "join catalog.prices as price\n" + + "where ord.paid = false\n" + + "and ord.customer = :customer\n" + + "and price.product = product\n" + + "and catalog.effectiveDate < sysdate\n" + + "and catalog.effectiveDate >= all (\n" + + "select cat.effectiveDate from Catalog as cat where cat.effectiveDate < sysdate)\n" + + "group by ord\n" + + "having sum(price.amount) > :minAmount\n" + + "order by sum(price.amount) desc" ); + + parse( "select ord.id, sum(price.amount), count(item)\n" + + "from Order as ord join ord.lineItems as item join item.product as product,\n" + + "Catalog as catalog join catalog.prices as price\n" + + "where ord.paid = false and ord.customer = :customer\n" + + "and price.product = product and catalog = :currentCatalog\n" + + "group by ord having sum(price.amount) > :minAmount\n" + + "order by sum(price.amount) desc" ); + + parse( "select count(payment), status.name \n" + + "from Payment as payment \n" + + " join payment.currentStatus as status\n" + + " join payment.statusChanges as statusChange\n" + + "where payment.status.name <> PaymentStatus.AWAITING_APPROVAL\n" + + " or (\n" + + " statusChange.timeStamp = ( \n" + + " select max(change.timeStamp) \n" + + " from PaymentStatusChange change \n" + + " where change.payment = payment\n" + + " )\n" + + " and statusChange.user <> :currentUser\n" + + " )\n" + + "group by status.name, status.sortOrder\n" + + "order by status.sortOrder" ); + parse( "select count(payment), status.name \n" + + "from Payment as payment\n" + + " join payment.currentStatus as status\n" + + "where payment.status.name <> PaymentStatus.AWAITING_APPROVAL\n" + + " or payment.statusChanges[ maxIndex(payment.statusChanges) ].user <> :currentUser\n" + + "group by status.name, status.sortOrder\n" + + "order by status.sortOrder" ); + parse( "select account, payment\n" + + "from Account as account\n" + + " left outer join account.payments as payment\n" + + "where :currentUser in elements(account.holder.users)\n" + + " and PaymentStatus.UNPAID = isNull(payment.currentStatus.name, PaymentStatus.UNPAID)\n" + + "order by account.type.sortOrder, account.accountNumber, payment.dueDate" ); + parse( "select account, payment\n" + + "from Account as account\n" + + " join account.holder.users as user\n" + + " left outer join account.payments as payment\n" + + "where :currentUser = user\n" + + " and PaymentStatus.UNPAID = isNull(payment.currentStatus.name, PaymentStatus.UNPAID)\n" + + "order by account.type.sortOrder, account.accountNumber, payment.dueDate" ); + } + + public void testExamples1() throws Exception { + parse( "select new org.hibernate.test.S(s.count, s.address)\n" + + "from s in class Simple" ); + parse( "select s.name, sysdate, trunc(s.pay), round(s.pay) from s in class Simple" ); + parse( "select round(s.pay, 2) from s" ); + parse( "select abs(round(s.pay)) from s in class Simple" ); + parse( "select trunc(round(sysdate)) from s in class Simple" ); + } + + public void testArrayExpr() throws Exception { + parse( "from Order ord where ord.items[0].id = 1234" ); + } + + public void testMultipleActualParameters() throws Exception { + parse( "select round(s.pay, 2) from s" ); + } + + public void testMultipleFromClasses() throws Exception { + parse( "FROM eg.mypackage.Cat qat, com.toadstool.Foo f" ); + parse( "FROM eg.mypackage.Cat qat, org.jabberwocky.Dipstick" ); + } + + public void 
testFromWithJoin() throws Exception { + parse( "FROM eg.mypackage.Cat qat, com.toadstool.Foo f join net.sf.blurb.Blurb" ); + parse( "FROM eg.mypackage.Cat qat left join com.multijoin.JoinORama , com.toadstool.Foo f join net.sf.blurb.Blurb" ); + } + + public void testSelect() throws Exception { + parse( "SELECT f FROM eg.mypackage.Cat qat, com.toadstool.Foo f join net.sf.blurb.Blurb" ); + parse( "SELECT DISTINCT bar FROM eg.mypackage.Cat qat left join com.multijoin.JoinORama as bar, com.toadstool.Foo f join net.sf.blurb.Blurb" ); + parse( "SELECT count(*) FROM eg.mypackage.Cat qat" ); + parse( "SELECT avg(qat.weight) FROM eg.mypackage.Cat qat" ); + } + + public void testWhere() throws Exception { + parse( "FROM eg.mypackage.Cat qat where qat.name like '%fluffy%' or qat.toes > 5" ); + parse( "FROM eg.mypackage.Cat qat where not qat.name like '%fluffy%' or qat.toes > 5" ); + parse( "FROM eg.mypackage.Cat qat where not qat.name not like '%fluffy%'" ); + parse( "FROM eg.mypackage.Cat qat where qat.name in ('crater','bean','fluffy')" ); + parse( "FROM eg.mypackage.Cat qat where qat.name not in ('crater','bean','fluffy')" ); + parse( "from Animal an where sqrt(an.bodyWeight)/2 > 10" ); + parse( "from Animal an where (an.bodyWeight > 10 and an.bodyWeight < 100) or an.bodyWeight is null" ); + } + + public void testGroupBy() throws Exception { + parse( "FROM eg.mypackage.Cat qat group by qat.breed" ); + parse( "FROM eg.mypackage.Cat qat group by qat.breed, qat.eyecolor" ); + } + + public void testOrderBy() throws Exception { + parse( "FROM eg.mypackage.Cat qat order by avg(qat.toes)" ); + parse( "from Animal an order by sqrt(an.bodyWeight)/2" ); + } + + public void testDoubleLiteral() throws Exception { + parse( "from eg.Cat as tinycat where fatcat.weight < 3.1415" ); + parse( "from eg.Cat as enormouscat where fatcat.weight > 3.1415e3" ); + } + + public void testComplexConstructor() throws Exception { + parse( "select new Foo(count(bar)) from bar" ); + parse( "select new Foo(count(bar),(select count(*) from doofus d where d.gob = 'fat' )) from bar" ); + } + + + public void testInNotIn() throws Exception { + parse( "from foo where foo.bar in ('a' , 'b', 'c')" ); + parse( "from foo where foo.bar not in ('a' , 'b', 'c')" ); + } + + public void testOperatorPrecedence() throws Exception { + parse( "from foo where foo.bar = 123 + foo.baz * foo.not" ); + parse( "from foo where foo.bar like 'testzzz' || foo.baz or foo.bar in ('duh', 'gob')" ); + } + + /** + * Tests HQL generated by the other unit tests. + * + * @throws Exception if the HQL could not be parsed. 
+ */ + public void testUnitTestHql() throws Exception { + parse( "select foo from foo in class org.hibernate.test.Foo, fee in class org.hibernate.test.Fee where foo.dependent = fee order by foo.string desc, foo.component.count asc, fee.id" ); + parse( "select foo.foo, foo.dependent from foo in class org.hibernate.test.Foo order by foo.foo.string desc, foo.component.count asc, foo.dependent.id" ); + parse( "select foo from foo in class org.hibernate.test.Foo order by foo.dependent.id, foo.dependent.fi" ); + parse( "SELECT one FROM one IN CLASS org.hibernate.test.One ORDER BY one.value ASC" ); + parse( "SELECT many.one FROM many IN CLASS org.hibernate.test.Many ORDER BY many.one.value ASC, many.one.id" ); + parse( "select foo.id from org.hibernate.test.Foo foo where foo.joinedProp = 'foo'" ); + parse( "from org.hibernate.test.Foo foo inner join fetch foo.foo" ); + parse( "from org.hibernate.test.Baz baz left outer join fetch baz.fooToGlarch" ); + parse( "select foo.foo.foo.string from foo in class org.hibernate.test.Foo where foo.foo = 'bar'" ); + parse( "select foo.foo.foo.foo.string from foo in class org.hibernate.test.Foo where foo.foo.foo = 'bar'" ); + parse( "select foo.foo.foo.string from foo in class org.hibernate.test.Foo where foo.foo.foo.foo.string = 'bar'" ); + parse( "select foo.string from foo in class org.hibernate.test.Foo where foo.foo.foo = 'bar' and foo.foo.foo.foo = 'baz'" ); + parse( "select foo.string from foo in class org.hibernate.test.Foo where foo.foo.foo.foo.string = 'a' and foo.foo.string = 'b'" ); + parse( "from org.hibernate.test.Foo as foo where foo.component.glarch.name is not null" ); + parse( "from org.hibernate.test.Foo as foo left outer join foo.component.glarch as glarch where glarch.name = 'foo'" ); + parse( "from org.hibernate.test.Foo" ); + parse( "from org.hibernate.test.Foo foo left outer join foo.foo" ); + parse( "from org.hibernate.test.Foo, org.hibernate.test.Bar" ); + parse( "from org.hibernate.test.Baz baz left join baz.fooToGlarch, org.hibernate.test.Bar bar join bar.foo" ); + parse( "from org.hibernate.test.Baz baz left join baz.fooToGlarch join baz.fooSet" ); + parse( "from org.hibernate.test.Baz baz left join baz.fooToGlarch join fetch baz.fooSet foo left join fetch foo.foo" ); + parse( "from foo in class org.hibernate.test.Foo where foo.string='osama bin laden' and foo.boolean = true order by foo.string asc, foo.component.count desc" ); + parse( "from foo in class org.hibernate.test.Foo where foo.string='osama bin laden' order by foo.string asc, foo.component.count desc" ); + parse( "select foo.foo from foo in class org.hibernate.test.Foo" ); + parse( "from foo in class org.hibernate.test.Foo where foo.component.count is null order by foo.component.count" ); + parse( "from foo in class org.hibernate.test.Foo where foo.component.name='foo'" ); + parse( "select distinct foo.component.name, foo.component.name from foo in class org.hibernate.test.Foo where foo.component.name='foo'" ); + parse( "select distinct foo.component.name, foo.id from foo in class org.hibernate.test.Foo where foo.component.name='foo'" ); + parse( "from foo in class org.hibernate.test.Foo where foo.id=?" ); + parse( "from foo in class org.hibernate.test.Foo where foo.key=?" ); + parse( "select foo.foo from foo in class org.hibernate.test.Foo where foo.string='fizard'" ); + parse( "from foo in class org.hibernate.test.Foo where foo.component.subcomponent.name='bar'" ); + parse( "select foo.foo from foo in class org.hibernate.test.Foo where foo.foo.id=?" 
); + parse( "from foo in class org.hibernate.test.Foo where foo.foo = ?" ); + parse( "from bar in class org.hibernate.test.Bar where bar.string='a string' or bar.string='a string'" ); + parse( "select foo.component.name, elements(foo.component.importantDates) from foo in class org.hibernate.test.Foo where foo.foo.id=?" ); + parse( "select max(elements(foo.component.importantDates)) from foo in class org.hibernate.test.Foo group by foo.id" ); + parse( "select foo.foo.foo.foo from foo in class org.hibernate.test.Foo, foo2 in class org.hibernate.test.Foo where foo = foo2.foo and not not ( not foo.string='fizard' ) and foo2.string between 'a' and (foo.foo.string) and ( foo2.string in ( 'fiz', 'blah') or 1=1 )" ); + parse( "from foo in class org.hibernate.test.Foo where foo.string='from BoogieDown -tinsel town =!@#$^&*())'" ); + parse( "from foo in class org.hibernate.test.Foo where not foo.string='foo''bar'" ); // Added quote quote is an escape + parse( "from foo in class org.hibernate.test.Foo where foo.component.glarch.next is null" ); + parse( " from bar in class org.hibernate.test.Bar where bar.baz.count=667 and bar.baz.count!=123 and not bar.baz.name='1-E-1'" ); + parse( " from i in class org.hibernate.test.Bar where i.baz.name='Bazza'" ); + parse( "select count(distinct foo.foo) from foo in class org.hibernate.test.Foo" ); + parse( "select count(foo.foo.boolean) from foo in class org.hibernate.test.Foo" ); + parse( "select count(*), foo.int from foo in class org.hibernate.test.Foo group by foo.int" ); + parse( "select sum(foo.foo.int) from foo in class org.hibernate.test.Foo" ); + parse( "select count(foo) from foo in class org.hibernate.test.Foo where foo.id=?" ); + parse( "from foo in class org.hibernate.test.Foo where foo.boolean = ?" ); + parse( "select new Foo(fo.x) from org.hibernate.test.Fo fo" ); + parse( "select new Foo(fo.integer) from org.hibernate.test.Foo fo" ); + parse( "select new Foo(fo.x) from org.hibernate.test.Foo fo" ); + parse( "select foo.long, foo.component.name, foo, foo.foo from foo in class org.hibernate.test.Foo" ); + parse( "select avg(foo.float), max(foo.component.name), count(distinct foo.id) from foo in class org.hibernate.test.Foo" ); + parse( "select foo.long, foo.component, foo, foo.foo from foo in class org.hibernate.test.Foo" ); + parse( "from o in class org.hibernate.test.MoreStuff" ); + parse( "from o in class org.hibernate.test.Many" ); + parse( "from o in class org.hibernate.test.Fee" ); + parse( "from o in class org.hibernate.test.Qux" ); + parse( "from o in class org.hibernate.test.Y" ); + parse( "from o in class org.hibernate.test.Fumm" ); + parse( "from o in class org.hibernate.test.X" ); + parse( "from o in class org.hibernate.test.Simple" ); + parse( "from o in class org.hibernate.test.Location" ); + parse( "from o in class org.hibernate.test.Holder" ); + parse( "from o in class org.hibernate.test.Part" ); + parse( "from o in class org.hibernate.test.Baz" ); + parse( "from o in class org.hibernate.test.Vetoer" ); + parse( "from o in class org.hibernate.test.Sortable" ); + parse( "from o in class org.hibernate.test.Contained" ); + parse( "from o in class org.hibernate.test.Stuff" ); + parse( "from o in class org.hibernate.test.Immutable" ); + parse( "from o in class org.hibernate.test.Container" ); + parse( "from o in class org.hibernate.test.X$XX" ); + parse( "from o in class org.hibernate.test.One" ); + parse( "from o in class org.hibernate.test.Foo" ); + parse( "from o in class org.hibernate.test.Fo" ); + parse( "from o in class 
org.hibernate.test.Glarch" ); + parse( "from o in class org.hibernate.test.Fum" ); + parse( "from n in class org.hibernate.test.Holder" ); + parse( "from n in class org.hibernate.test.Baz" ); + parse( "from n in class org.hibernate.test.Bar" ); + parse( "from n in class org.hibernate.test.Glarch" ); + parse( "from n in class org.hibernate.test.Holder where n.name is not null" ); + parse( "from n in class org.hibernate.test.Baz where n.name is not null" ); + parse( "from n in class org.hibernate.test.Bar where n.name is not null" ); + parse( "from n in class org.hibernate.test.Glarch where n.name is not null" ); + parse( "from n in class org.hibernate.test.Holder" ); + parse( "from n in class org.hibernate.test.Baz" ); + parse( "from n in class org.hibernate.test.Bar" ); + parse( "from n in class org.hibernate.test.Glarch" ); + parse( "from n0 in class org.hibernate.test.Holder, n1 in class org.hibernate.test.Holder where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Baz, n1 in class org.hibernate.test.Holder where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Bar, n1 in class org.hibernate.test.Holder where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Glarch, n1 in class org.hibernate.test.Holder where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Holder, n1 in class org.hibernate.test.Baz where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Baz, n1 in class org.hibernate.test.Baz where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Bar, n1 in class org.hibernate.test.Baz where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Glarch, n1 in class org.hibernate.test.Baz where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Holder, n1 in class org.hibernate.test.Bar where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Baz, n1 in class org.hibernate.test.Bar where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Bar, n1 in class org.hibernate.test.Bar where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Glarch, n1 in class org.hibernate.test.Bar where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Holder, n1 in class org.hibernate.test.Glarch where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Baz, n1 in class org.hibernate.test.Glarch where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Bar, n1 in class org.hibernate.test.Glarch where n0.name = n1.name" ); + parse( "from n0 in class org.hibernate.test.Glarch, n1 in class org.hibernate.test.Glarch where n0.name = n1.name" ); + parse( "from n in class org.hibernate.test.Holder where n.name = :name" ); + parse( "from o in class org.hibernate.test.MoreStuff" ); + parse( "from o in class org.hibernate.test.Many" ); + parse( "from o in class org.hibernate.test.Fee" ); + parse( "from o in class org.hibernate.test.Qux" ); + parse( "from o in class org.hibernate.test.Y" ); + parse( "from o in class org.hibernate.test.Fumm" ); + parse( "from o in class org.hibernate.test.X" ); + parse( "from o in class org.hibernate.test.Simple" ); + parse( "from o in class org.hibernate.test.Location" ); + parse( "from o in class org.hibernate.test.Holder" ); + parse( "from o in class org.hibernate.test.Part" ); + parse( "from o in class org.hibernate.test.Baz" ); + parse( "from o in class org.hibernate.test.Vetoer" ); + parse( "from o in class org.hibernate.test.Sortable" ); + 
parse( "from o in class org.hibernate.test.Contained" ); + parse( "from o in class org.hibernate.test.Stuff" ); + parse( "from o in class org.hibernate.test.Immutable" ); + parse( "from o in class org.hibernate.test.Container" ); + parse( "from o in class org.hibernate.test.X$XX" ); + parse( "from o in class org.hibernate.test.One" ); + parse( "from o in class org.hibernate.test.Foo" ); + parse( "from o in class org.hibernate.test.Fo" ); + parse( "from o in class org.hibernate.test.Glarch" ); + parse( "from o in class org.hibernate.test.Fum" ); + parse( "select baz.code, min(baz.count) from baz in class org.hibernate.test.Baz group by baz.code" ); + parse( "selecT baz from baz in class org.hibernate.test.Baz where baz.stringDateMap['foo'] is not null or baz.stringDateMap['bar'] = ?" ); + parse( "select baz from baz in class org.hibernate.test.Baz where baz.stringDateMap['now'] is not null" ); + parse( "select baz from baz in class org.hibernate.test.Baz where baz.stringDateMap['now'] is not null and baz.stringDateMap['big bang'] < baz.stringDateMap['now']" ); + parse( "select index(date) from org.hibernate.test.Baz baz join baz.stringDateMap date" ); + parse( "from foo in class org.hibernate.test.Foo where foo.integer not between 1 and 5 and foo.string not in ('cde', 'abc') and foo.string is not null and foo.integer<=3" ); + parse( "from org.hibernate.test.Baz baz inner join baz.collectionComponent.nested.foos foo where foo.string is null" ); + parse( "from org.hibernate.test.Baz baz inner join baz.fooSet where '1' in (from baz.fooSet foo where foo.string is not null)" ); + parse( "from org.hibernate.test.Baz baz where 'a' in elements(baz.collectionComponent.nested.foos) and 1.0 in elements(baz.collectionComponent.nested.floats)" ); + parse( "from org.hibernate.test.Foo foo join foo.foo where foo.foo in ('1','2','3')" ); + parse( "select foo.foo from org.hibernate.test.Foo foo where foo.foo in ('1','2','3')" ); + parse( "select foo.foo.string from org.hibernate.test.Foo foo where foo.foo in ('1','2','3')" ); + parse( "select foo.foo.string from org.hibernate.test.Foo foo where foo.foo.string in ('1','2','3')" ); + parse( "select foo.foo.long from org.hibernate.test.Foo foo where foo.foo.string in ('1','2','3')" ); + parse( "select count(*) from org.hibernate.test.Foo foo where foo.foo.string in ('1','2','3') or foo.foo.long in (1,2,3)" ); + parse( "select count(*) from org.hibernate.test.Foo foo where foo.foo.string in ('1','2','3') group by foo.foo.long" ); + parse( "from org.hibernate.test.Foo foo1 left join foo1.foo foo2 left join foo2.foo where foo1.string is not null" ); + parse( "from org.hibernate.test.Foo foo1 left join foo1.foo.foo where foo1.string is not null" ); + parse( "from org.hibernate.test.Foo foo1 left join foo1.foo foo2 left join foo1.foo.foo foo3 where foo1.string is not null" ); + parse( "select foo.formula from org.hibernate.test.Foo foo where foo.formula > 0" ); + parse( "from org.hibernate.test.Foo as foo join foo.foo as foo2 where foo2.id >'a' or foo2.id <'a'" ); + parse( "from org.hibernate.test.Holder" ); + parse( "from org.hibernate.test.Baz baz left outer join fetch baz.manyToAny" ); + parse( "from org.hibernate.test.Baz baz join baz.manyToAny" ); + parse( "select baz from org.hibernate.test.Baz baz join baz.manyToAny a where index(a) = 0" ); + parse( "select bar from org.hibernate.test.Bar bar where bar.baz.stringDateMap['now'] is not null" ); + parse( "select bar from org.hibernate.test.Bar bar join bar.baz b where b.stringDateMap['big bang'] < 
b.stringDateMap['now'] and b.stringDateMap['now'] is not null" ); + parse( "select bar from org.hibernate.test.Bar bar where bar.baz.stringDateMap['big bang'] < bar.baz.stringDateMap['now'] and bar.baz.stringDateMap['now'] is not null" ); + parse( "select foo.string, foo.component, foo.id from org.hibernate.test.Bar foo" ); + parse( "select elements(baz.components) from org.hibernate.test.Baz baz" ); + parse( "select bc.name from org.hibernate.test.Baz baz join baz.components bc" ); + parse( "from org.hibernate.test.Foo foo where foo.integer < 10 order by foo.string" ); + parse( "from org.hibernate.test.Fee" ); + parse( "from org.hibernate.test.Holder h join h.otherHolder oh where h.otherHolder.name = 'bar'" ); + parse( "from org.hibernate.test.Baz baz join baz.fooSet foo join foo.foo.foo foo2 where foo2.string = 'foo'" ); + parse( "from org.hibernate.test.Baz baz join baz.fooArray foo join foo.foo.foo foo2 where foo2.string = 'foo'" ); + parse( "from org.hibernate.test.Baz baz join baz.stringDateMap date where index(date) = 'foo'" ); + parse( "from org.hibernate.test.Baz baz join baz.topGlarchez g where index(g) = 'A'" ); + parse( "select index(g) from org.hibernate.test.Baz baz join baz.topGlarchez g" ); + parse( "from org.hibernate.test.Baz baz left join baz.stringSet" ); + parse( "from org.hibernate.test.Baz baz join baz.stringSet str where str='foo'" ); + parse( "from org.hibernate.test.Baz baz left join fetch baz.stringSet" ); + parse( "from org.hibernate.test.Baz baz join baz.stringSet string where string='foo'" ); + parse( "from org.hibernate.test.Baz baz inner join baz.components comp where comp.name='foo'" ); + parse( "from org.hibernate.test.Glarch g inner join g.fooComponents comp where comp.fee is not null" ); + parse( "from org.hibernate.test.Glarch g inner join g.fooComponents comp join comp.fee fee where fee.count > 0" ); + parse( "from org.hibernate.test.Glarch g inner join g.fooComponents comp where comp.fee.count is not null" ); + parse( "from org.hibernate.test.Baz baz left join fetch baz.fooBag" ); + parse( "from org.hibernate.test.Glarch" ); + parse( "from org.hibernate.test.Fee" ); + parse( "from org.hibernate.test.Baz baz left join fetch baz.sortablez order by baz.name asc" ); + parse( "from org.hibernate.test.Baz baz order by baz.name asc" ); + parse( "from org.hibernate.test.Foo foo, org.hibernate.test.Baz baz left join fetch baz.fees" ); + parse( "from org.hibernate.test.Foo foo, org.hibernate.test.Bar bar" ); + parse( "from org.hibernate.test.Foo foo" ); + parse( "from org.hibernate.test.Foo foo, org.hibernate.test.Bar bar, org.hibernate.test.Bar bar2" ); + parse( "from org.hibernate.test.X x" ); + parse( "from org.hibernate.test.Foo foo" ); + parse( "select distinct foo from org.hibernate.test.Foo foo" ); + parse( "from org.hibernate.test.Glarch g where g.multiple.glarch=g and g.multiple.count=12" ); + parse( "from org.hibernate.test.Bar bar left join bar.baz baz left join baz.cascadingBars b where bar.name like 'Bar %'" ); + parse( "select bar, b from org.hibernate.test.Bar bar left join bar.baz baz left join baz.cascadingBars b where bar.name like 'Bar%'" ); + parse( "select bar, b from org.hibernate.test.Bar bar left join bar.baz baz left join baz.cascadingBars b where ( bar.name in (:nameList0_, :nameList1_, :nameList2_) or bar.name in (:nameList0_, :nameList1_, :nameList2_) ) and bar.string = :stringVal" ); + parse( "select bar, b from org.hibernate.test.Bar bar inner join bar.baz baz inner join baz.cascadingBars b where bar.name like 'Bar%'" ); + parse( 
"select bar, b from org.hibernate.test.Bar bar left join bar.baz baz left join baz.cascadingBars b where bar.name like :name and b.name like :name" ); + parse( "select bar from org.hibernate.test.Bar as bar where bar.x > ? or bar.short = 1 or bar.string = 'ff ? bb'" ); + parse( "select bar from org.hibernate.test.Bar as bar where bar.string = ' ? ' or bar.string = '?'" ); + parse( "from org.hibernate.test.Baz baz, baz.fooArray foo" ); + parse( "from s in class org.hibernate.test.Stuff where s.foo.id = ? and s.id.id = ? and s.moreStuff.id.intId = ? and s.moreStuff.id.stringId = ?" ); + parse( "from s in class org.hibernate.test.Stuff where s.foo.id = ? and s.id.id = ? and s.moreStuff.name = ?" ); + parse( "from s in class org.hibernate.test.Stuff where s.foo.string is not null" ); + parse( "from s in class org.hibernate.test.Stuff where s.foo > '0' order by s.foo" ); + parse( "from ms in class org.hibernate.test.MoreStuff" ); + parse( "from foo in class org.hibernate.test.Foo" ); + parse( "from fee in class org.hibernate.test.Fee" ); + parse( "select new Result(foo.string, foo.long, foo.integer) from foo in class org.hibernate.test.Foo" ); + parse( "select new Result( baz.name, foo.long, count(elements(baz.fooArray)) ) from org.hibernate.test.Baz baz join baz.fooArray foo group by baz.name, foo.long" ); + parse( "select new Result( baz.name, max(foo.long), count(foo) ) from org.hibernate.test.Baz baz join baz.fooArray foo group by baz.name" ); + parse( "select max( elements(bar.baz.fooArray) ) from org.hibernate.test.Bar as bar" ); + parse( "from org.hibernate.test.Baz baz left join baz.fooToGlarch join fetch baz.fooArray foo left join fetch foo.foo" ); + parse( "select baz.name from org.hibernate.test.Bar bar inner join bar.baz baz inner join baz.fooSet foo where baz.name = bar.string" ); + parse( "SELECT baz.name FROM org.hibernate.test.Bar AS bar INNER JOIN bar.baz AS baz INNER JOIN baz.fooSet AS foo WHERE baz.name = bar.string" ); + parse( "select baz.name from org.hibernate.test.Bar bar join bar.baz baz left outer join baz.fooSet foo where baz.name = bar.string" ); + parse( "select baz.name from org.hibernate.test.Bar bar, bar.baz baz, baz.fooSet foo where baz.name = bar.string" ); + parse( "SELECT baz.name FROM org.hibernate.test.Bar AS bar, bar.baz AS baz, baz.fooSet AS foo WHERE baz.name = bar.string" ); + parse( "select baz.name from org.hibernate.test.Bar bar left join bar.baz baz left join baz.fooSet foo where baz.name = bar.string" ); + parse( "select foo.string from org.hibernate.test.Bar bar left join bar.baz.fooSet foo where bar.string = foo.string" ); + parse( "select baz.name from org.hibernate.test.Bar bar left join bar.baz baz left join baz.fooArray foo where baz.name = bar.string" ); + parse( "select foo.string from org.hibernate.test.Bar bar left join bar.baz.fooArray foo where bar.string = foo.string" ); + parse( "select foo from bar in class org.hibernate.test.Bar inner join bar.baz as baz inner join baz.fooSet as foo" ); + parse( "select foo from bar in class org.hibernate.test.Bar inner join bar.baz.fooSet as foo" ); + parse( "select foo from bar in class org.hibernate.test.Bar, bar.baz as baz, baz.fooSet as foo" ); + parse( "select foo from bar in class org.hibernate.test.Bar, bar.baz.fooSet as foo" ); + parse( "from org.hibernate.test.Bar bar join bar.baz.fooArray foo" ); + parse( "from bar in class org.hibernate.test.Bar, foo in elements( bar.baz.fooArray )" ); + parse( "select one.id, elements(one.manies) from one in class org.hibernate.test.One" ); + parse( 
"select max( elements(one.manies) ) from one in class org.hibernate.test.One" ); + parse( "select one, elements(one.manies) from one in class org.hibernate.test.One" ); + parse( "select one, max(elements(one.manies)) from one in class org.hibernate.test.One group by one" ); + parse( "select elements(baz.fooArray) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "select elements(baz.fooArray) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "select indices(baz.fooArray) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "select baz, max(elements(baz.timeArray)) from baz in class org.hibernate.test.Baz group by baz" ); + parse( "select baz, baz.stringSet.size, count(distinct elements(baz.stringSet)), max(elements(baz.stringSet)) from baz in class org.hibernate.test.Baz group by baz" ); + parse( "select max( elements(baz.timeArray) ) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "select max(elements(baz.stringSet)) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "select size(baz.stringSet) from baz in class org.hibernate.test.Baz where baz.id=?" ); + parse( "from org.hibernate.test.Foo foo where foo.component.glarch.id is not null" ); + parse( "from baz in class org.hibernate.test.Baz" ); + parse( "select elements(baz.stringArray) from baz in class org.hibernate.test.Baz" ); + parse( "from foo in class org.hibernate.test.Foo" ); + parse( "select elements(baz.stringList) from baz in class org.hibernate.test.Baz" ); + parse( "select count(*) from org.hibernate.test.Bar" ); + parse( "select count(*) from b in class org.hibernate.test.Bar" ); + parse( "from g in class org.hibernate.test.Glarch" ); + parse( "select baz, baz from baz in class org.hibernate.test.Baz" ); + parse( "select baz from baz in class org.hibernate.test.Baz order by baz" ); + parse( "from bar in class org.hibernate.test.Bar" ); + parse( "from g in class org.hibernate.test.Glarch" ); + parse( "from f in class org.hibernate.test.Foo" ); + parse( "from o in class org.hibernate.test.One" ); + parse( "from q in class org.hibernate.test.Qux" ); + parse( "select foo from foo in class org.hibernate.test.Foo where foo.string='foo bar'" ); + parse( "from foo in class org.hibernate.test.Foo order by foo.string, foo.date" ); + parse( "from foo in class org.hibernate.test.Foo where foo.class='B'" ); + parse( "from foo in class org.hibernate.test.Foo where foo.class=Bar" ); + parse( "select bar from bar in class org.hibernate.test.Bar, foo in class org.hibernate.test.Foo where bar.string = foo.string and not bar=foo" ); + parse( "from foo in class org.hibernate.test.Foo where foo.string='foo bar'" ); + parse( "select foo from foo in class org.hibernate.test.Foo" ); + parse( "from bar in class org.hibernate.test.Bar where bar.barString='bar bar'" ); + parse( "from t in class org.hibernate.test.Trivial" ); + parse( "from foo in class org.hibernate.test.Foo where foo.date = ?" 
); + parse( "from o in class org.hibernate.test.MoreStuff" ); + parse( "from o in class org.hibernate.test.Many" ); + parse( "from o in class org.hibernate.test.Fee" ); + parse( "from o in class org.hibernate.test.Qux" ); + parse( "from o in class org.hibernate.test.Y" ); + parse( "from o in class org.hibernate.test.Fumm" ); + parse( "from o in class org.hibernate.test.X" ); + parse( "from o in class org.hibernate.test.Simple" ); + parse( "from o in class org.hibernate.test.Location" ); + parse( "from o in class org.hibernate.test.Holder" ); + parse( "from o in class org.hibernate.test.Part" ); + parse( "from o in class org.hibernate.test.Baz" ); + parse( "from o in class org.hibernate.test.Vetoer" ); + parse( "from o in class org.hibernate.test.Sortable" ); + parse( "from o in class org.hibernate.test.Contained" ); + parse( "from o in class org.hibernate.test.Stuff" ); + parse( "from o in class org.hibernate.test.Immutable" ); + parse( "from o in class org.hibernate.test.Container" ); + parse( "from o in class org.hibernate.test.X$XX" ); + parse( "from o in class org.hibernate.test.One" ); + parse( "from o in class org.hibernate.test.Foo" ); + parse( "from o in class org.hibernate.test.Fo" ); + parse( "from o in class org.hibernate.test.Glarch" ); + parse( "from o in class org.hibernate.test.Fum" ); + parse( "from q in class org.hibernate.test.Qux where q.stuff is null" ); + parse( "from q in class org.hibernate.test.Qux where q.stuff=?" ); + parse( "from q in class org.hibernate.test.Qux" ); + parse( "from g in class org.hibernate.test.Glarch where g.version=2" ); + parse( "from g in class org.hibernate.test.Glarch where g.next is not null" ); + parse( "from g in class org.hibernate.test.Glarch order by g.order asc" ); + parse( "from foo in class org.hibernate.test.Foo order by foo.string asc" ); + parse( "select parent, child from parent in class org.hibernate.test.Foo, child in class org.hibernate.test.Foo where parent.foo = child" ); + parse( "select count(distinct child.id), count(distinct parent.id) from parent in class org.hibernate.test.Foo, child in class org.hibernate.test.Foo where parent.foo = child" ); + parse( "select child.id, parent.id, child.long from parent in class org.hibernate.test.Foo, child in class org.hibernate.test.Foo where parent.foo = child" ); + parse( "select child.id, parent.id, child.long, child, parent.foo from parent in class org.hibernate.test.Foo, child in class org.hibernate.test.Foo where parent.foo = child" ); + parse( "select parent, child from parent in class org.hibernate.test.Foo, child in class org.hibernate.test.Foo where parent.foo = child and parent.string='a string'" ); + parse( "from fee in class org.hibernate.test.Fee" ); + parse( "from org.hibernate.test.Foo foo where foo.custom.s1 = 'one'" ); + parse( "from im in class org.hibernate.test.Immutable where im = ?" ); + parse( "from foo in class org.hibernate.test.Foo" ); + parse( "from foo in class org.hibernate.test.Foo where foo.char='X'" ); + parse( "select elements(baz.stringArray) from baz in class org.hibernate.test.Baz" ); + parse( "select distinct elements(baz.stringArray) from baz in class org.hibernate.test.Baz" ); + parse( "select elements(baz.fooArray) from baz in class org.hibernate.test.Baz" ); + parse( "from foo in class org.hibernate.test.Fo" ); + parse( "from foo in class org.hibernate.test.Foo where foo.dependent.qux.foo.string = 'foo2'" ); + parse( "from org.hibernate.test.Bar bar where bar.object.id = ? and bar.object.class = ?" 
); + parse( "select one from org.hibernate.test.One one, org.hibernate.test.Bar bar where bar.object.id = one.id and bar.object.class = 'O'" ); + parse( "from l in class org.hibernate.test.Location where l.countryCode = 'AU' and l.description='foo bar'" ); + parse( "from org.hibernate.test.Bar bar" ); + parse( "From org.hibernate.test.Bar bar" ); + parse( "From org.hibernate.test.Foo foo" ); + parse( "from o in class org.hibernate.test.Baz" ); + parse( "from o in class org.hibernate.test.Foo" ); + parse( "from f in class org.hibernate.test.Foo" ); + parse( "select fum.id from fum in class org.hibernate.test.Fum where not fum.fum='FRIEND'" ); + parse( "select fum.id from fum in class org.hibernate.test.Fum where not fum.fum='FRIEND'" ); + parse( "from fum in class org.hibernate.test.Fum where not fum.fum='FRIEND'" ); + parse( "from fo in class org.hibernate.test.Fo where fo.id.string like 'an instance of fo'" ); + parse( "from org.hibernate.test.Inner" ); + parse( "from org.hibernate.test.Outer o where o.id.detailId = ?" ); + parse( "from org.hibernate.test.Outer o where o.id.master.id.sup.dudu is not null" ); + parse( "from org.hibernate.test.Outer o where o.id.master.id.sup.id.akey is not null" ); + parse( "select o.id.master.id.sup.dudu from org.hibernate.test.Outer o where o.id.master.id.sup.dudu is not null" ); + parse( "select o.id.master.id.sup.id.akey from org.hibernate.test.Outer o where o.id.master.id.sup.id.akey is not null" ); + parse( "from org.hibernate.test.Outer o where o.id.master.bla = ''" ); + parse( "from org.hibernate.test.Outer o where o.id.master.id.one = ''" ); + parse( "from org.hibernate.test.Inner inn where inn.id.bkey is not null and inn.backOut.id.master.id.sup.id.akey > 'a'" ); + parse( "from org.hibernate.test.Outer as o left join o.id.master m left join m.id.sup where o.bubu is not null" ); + parse( "from org.hibernate.test.Outer as o left join o.id.master.id.sup s where o.bubu is not null" ); + parse( "from org.hibernate.test.Outer as o left join o.id.master m left join o.id.master.id.sup s where o.bubu is not null" ); + parse( "select fum1.fo from fum1 in class org.hibernate.test.Fum where fum1.fo.fum is not null" ); + parse( "from fum1 in class org.hibernate.test.Fum where fum1.fo.fum is not null order by fum1.fo.fum" ); + parse( "select elements(fum1.friends) from fum1 in class org.hibernate.test.Fum" ); + parse( "from fum1 in class org.hibernate.test.Fum, fr in elements( fum1.friends )" ); + parse( "select new Jay(eye) from org.hibernate.test.Eye eye" ); + parse( "from org.hibernate.test.Category cat where cat.name='new foo'" ); + parse( "from org.hibernate.test.Category cat where cat.name='new sub'" ); + parse( "from org.hibernate.test.Up up order by up.id2 asc" ); + parse( "from org.hibernate.test.Down down" ); + parse( "from org.hibernate.test.Up up" ); + parse( "from m in class org.hibernate.test.Master" ); + parse( "from s in class org.hibernate.test.Several" ); + parse( "from s in class org.hibernate.test.Single" ); + parse( "\n" + + " from d in class \n" + + " org.hibernate.test.Detail\n" + + " " ); + parse( "from c in class org.hibernate.test.Category where c.name = org.hibernate.test.Category.ROOT_CATEGORY" ); + parse( "select c from c in class org.hibernate.test.Container, s in class org.hibernate.test.Simple where c.oneToMany[2] = s" ); + parse( "select c from c in class org.hibernate.test.Container, s in class org.hibernate.test.Simple where c.manyToMany[2] = s" ); + parse( "select c from c in class org.hibernate.test.Container, s in class 
org.hibernate.test.Simple where s = c.oneToMany[2]" ); + parse( "select c from c in class org.hibernate.test.Container, s in class org.hibernate.test.Simple where s = c.manyToMany[2]" ); + parse( "select c from c in class org.hibernate.test.Container where c.oneToMany[0].name = 's'" ); + parse( "select c from c in class org.hibernate.test.Container where c.manyToMany[0].name = 's'" ); + parse( "select c from c in class org.hibernate.test.Container where 's' = c.oneToMany[2 - 2].name" ); + parse( "select c from c in class org.hibernate.test.Container where 's' = c.manyToMany[(3+1)/4-1].name" ); + parse( "select c from c in class org.hibernate.test.Container where c.manyToMany[ maxindex(c.manyToMany) ].count = 2" ); + parse( "select c from c in class org.hibernate.test.Container where c.oneToMany[ c.manyToMany[0].count ].name = 's'" ); + parse( "select c from org.hibernate.test.Container c where c.manyToMany[ c.oneToMany[0].count ].name = 's'" ); + parse( "select count(comp.name) from org.hibernate.test.Container c join c.components comp" ); + parse( "from org.hibernate.test.Parent p left join fetch p.child" ); + parse( "from org.hibernate.test.Parent p join p.child c where c.x > 0" ); + parse( "from org.hibernate.test.Child c join c.parent p where p.x > 0" ); + parse( "from org.hibernate.test.Child" ); + parse( "from org.hibernate.test.MoreStuff" ); + parse( "from org.hibernate.test.Many" ); + parse( "from org.hibernate.test.Fee" ); + parse( "from org.hibernate.test.Qux" ); + parse( "from org.hibernate.test.Fumm" ); + parse( "from org.hibernate.test.Parent" ); + parse( "from org.hibernate.test.Simple" ); + parse( "from org.hibernate.test.Holder" ); + parse( "from org.hibernate.test.Part" ); + parse( "from org.hibernate.test.Baz" ); + parse( "from org.hibernate.test.Vetoer" ); + parse( "from org.hibernate.test.Sortable" ); + parse( "from org.hibernate.test.Contained" ); + parse( "from org.hibernate.test.Circular" ); + parse( "from org.hibernate.test.Stuff" ); + parse( "from org.hibernate.test.Immutable" ); + parse( "from org.hibernate.test.Container" ); + parse( "from org.hibernate.test.One" ); + parse( "from org.hibernate.test.Foo" ); + parse( "from org.hibernate.test.Fo" ); + parse( "from org.hibernate.test.Glarch" ); + parse( "from org.hibernate.test.Fum" ); + parse( "from org.hibernate.test.Glarch g" ); + parse( "from org.hibernate.test.Part" ); + parse( "from org.hibernate.test.Baz baz join baz.parts" ); + parse( "from c in class org.hibernate.test.Child where c.parent.count=66" ); + parse( "from org.hibernate.te... [truncated message content] |
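The parse() and buildHqlParser() helpers that every test method above relies on fall outside the quoted portion of this message. A plausible minimal version, assuming nothing beyond the HqlParser API shown in the next commit, would be:

    package org.hibernate.test.hql.redesign;

    import antlr.RecognitionException;
    import antlr.TokenStreamException;
    import antlr.collections.AST;
    import org.hibernate.hql.ast.parse.HqlParser;

    // Hypothetical reconstruction, not the committed helpers.
    class ParserTestSupportSketch {

        static HqlParser buildHqlParser(String hql) {
            return new HqlParser( hql );               // lexer and AST factory are set up by the constructor
        }

        static AST parse(String hql) throws RecognitionException, TokenStreamException {
            HqlParser parser = buildHqlParser( hql );
            parser.statement();                        // exercise the top-level rule
            return parser.getAST();                    // tests typically just assert this tree is well-formed
        }
    }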
From: <hib...@li...> - 2006-06-30 05:58:32
Author: ste...@jb... Date: 2006-06-30 01:55:55 -0400 (Fri, 30 Jun 2006) New Revision: 10070 Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/HqlParser.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/ParseLexer.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/PathCollector.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/AbstractImplicitJoinContext.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/FromClauseImplicitJoinContext.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/ImplicitJoinContext.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/NormalImplicitJoinContext.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/PersisterReferenceBuilder.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/tree/ASTFactoryAwareNode.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/tree/PersisterReferenceContextAwareNode.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/tree/SessionFactoryAwareNode.java Removed: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/ResolverAware.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/SessionFactoryAwareNode.java Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/CollectionPersisterReference.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/EntityPersisterReference.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolverASTFactory.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/PersisterReferenceContext.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/PropertyReference.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/StatementNode.java branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/util/ReflectHelper.java Log: redid parse phase and simplified portions of resolve phase Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/HqlParser.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/HqlParser.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/HqlParser.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,384 @@ +package org.hibernate.hql.ast.parse; + +import org.hibernate.hql.antlr.GeneratedHqlParser; +import org.hibernate.hql.antlr.ParseTokenTypes; +import org.hibernate.hql.ast.util.ASTUtil; +import org.hibernate.hql.ast.util.ASTPrinter; +import org.hibernate.hql.ast.HqlToken; +import org.hibernate.hql.ast.ErrorCounter; +import org.hibernate.hql.ast.HqlASTFactory; +import org.hibernate.hql.ast.ParseErrorHandler; +import org.hibernate.QueryException; +import org.hibernate.util.ReflectHelper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import java.io.StringReader; +import java.io.PrintStream; +import java.io.PrintWriter; + +import antlr.collections.AST; +import antlr.RecognitionException; +import antlr.TokenStreamException; +import antlr.Token; +import antlr.ASTPair; +import antlr.MismatchedTokenException; + +/** + * The parser used by Hibernate to generate an AST given an input + * HQL string (a "stream parser"). 
The produced AST is then used + * (and mutated) by later phases/parsers to apply semantic resolution; + * this parser, however, is all about syntax resolution. + * + * @author Steve Ebersole + */ +public class HqlParser extends GeneratedHqlParser { + + public static final Log log = LogFactory.getLog( HqlParser.class ); + + private final ParseErrorHandler parseErrorHandler = new ErrorCounter(); + private final ASTPrinter printer = new ASTPrinter( ParseTokenTypes.class ); + + public HqlParser(String hql) { + super( new ParseLexer( new StringReader( hql ) ) ); + setASTFactory( new HqlASTFactory() ); + } + + + // overrides of Antlr infastructure methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void reportError(RecognitionException e) { + parseErrorHandler.reportError( e ); + } + + public void reportError(String s) { + parseErrorHandler.reportError( s ); + } + + public void reportWarning(String s) { + parseErrorHandler.reportWarning( s ); + } + + public ParseErrorHandler getParseErrorHandler() { + return parseErrorHandler; + } + + static public void panic() { + //overriden to avoid System.exit + throw new QueryException( "Parser: panic" ); + } + + + // various AST output methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public void dumpAst(AST ast) { + dumpAst( ast, "DUMP" ); + } + + public void dumpAst(AST ast, String header) { + log.info( printer.showAsString( ast, header ) ); + } + + public void showAst(AST ast, PrintStream out) { + showAst( ast, new PrintWriter( out ) ); + } + + private void showAst(AST ast, PrintWriter pw) { + printer.showAst( ast, pw ); + } + + + // overrides of grammar semantic actions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + public String extractEntityName(AST node) throws RecognitionException { + dumpAst( node, "expected entity name path" ); + return PathCollector.getPath( node ); + } + + public String extractDynamicInstantiationPojoName(AST node) { + return PathCollector.getPath( node ); + } + + public String extractJavaConstantReference(AST node) throws RecognitionException { + return PathCollector.getPath( node ); + } + + public boolean isJavaConstant() throws RecognitionException, TokenStreamException { + if ( LA( 2 ) != DOT ) { + return false; + } + String path = LT( 1 ).getText() + "." + LT( 3 ).getText(); + int currentLaPotion = 5; + while ( LA( currentLaPotion - 1 ) == DOT ) { + path += "." + LT( currentLaPotion ).getText(); + currentLaPotion += 2; + } + return isJavaConstant( path ); + } + + public boolean isJavaConstant(AST dotStructure) throws RecognitionException, TokenStreamException { + return isJavaConstant( PathCollector.getPath( dotStructure ) ); + } + + private boolean isJavaConstant(String path) { + try { + log.trace( "Testing path [" + path + "] as potential java constant" ); + Object value = ReflectHelper.getConstantValueStrictly( path ); + log.trace( "Resolved path to java constant [" + value + "]" ); + return true; + } + catch( Throwable t ) { + log.trace( "Path did not resolve to java constant : " + t ); + return false; + } + } + + /** + * Overrides the base behavior to retry keywords as identifiers. + * + * @param token The token. + * @param ex The recognition exception. + * @return AST - The new AST. + * @throws antlr.RecognitionException if the substitution was not possible. + * @throws antlr.TokenStreamException if the substitution was not possible. 
+ */ + public AST handleIdentifierError(Token token, RecognitionException ex) throws RecognitionException, TokenStreamException { + // If the token can tell us if it could be an identifier... + if ( token instanceof HqlToken ) { + HqlToken hqlToken = ( HqlToken ) token; + // ... and the token could be an identifer and the error is + // a mismatched token error ... + if ( hqlToken.isPossibleID() && ex instanceof MismatchedTokenException ) { + MismatchedTokenException mte = ( MismatchedTokenException ) ex; + // ... and the expected token type was an identifier, then: + if ( mte.expecting == IDENT ) { + // Use the token as an identifier. + reportWarning( + "Keyword '"+ token.getText() + + "' is being interpreted as an identifier due to: " + + mte.getMessage() + ); + // Add the token to the AST. + ASTPair currentAST = new ASTPair(); + token.setType( WEIRD_IDENT ); + astFactory.addASTChild( currentAST, astFactory.create( token ) ); + consume(); + return currentAST.root; + } + } + } + return super.handleIdentifierError( token, ex ); + } + + public void handleDotIdent() throws TokenStreamException { + // This handles HHH-354, where there is a strange property name in a where clause. + // If the lookahead contains a DOT then something that isn't an IDENT... + if ( LA( 1 ) == DOT && LA( 2 ) != IDENT ) { + // See if the second lookahed token can be an identifier. + if ( ( ( HqlToken ) LT( 2 ) ).isPossibleID() ) { + // Set it! + LT( 2 ).setType( IDENT ); + if ( log.isDebugEnabled() ) { + log.debug( "handleDotIdent() : new LT(2) token - " + LT( 1 ) ); + } + } + } + } + + /** + * Returns an equivalent tree for (NOT (a relop b) ), for example:<pre> + * (NOT (GT a b) ) => (LE a b) + * </pre> + * + * @param x The sub tree to transform, the parent is assumed to be NOT. + * @return AST - The equivalent sub-tree. + */ + public AST negateNode(AST x) { + //TODO: switch statements are always evil! We already had bugs because + // of forgotten token types. Use polymorphism for this! 
+ switch ( x.getType() ) { + case OR: + x.setType(AND); + x.setText("{and}"); + negateNode( x.getFirstChild() ); + negateNode( x.getFirstChild().getNextSibling() ); + return x; + case AND: + x.setType(OR); + x.setText("{or}"); + negateNode( x.getFirstChild() ); + negateNode( x.getFirstChild().getNextSibling() ); + return x; + case EQ: + x.setType( NE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (EQ a b) ) => (NE a b) + case NE: + x.setType( EQ ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NE a b) ) => (EQ a b) + case GT: + x.setType( LE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (GT a b) ) => (LE a b) + case LT: + x.setType( GE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LT a b) ) => (GE a b) + case GE: + x.setType( LT ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (GE a b) ) => (LT a b) + case LE: + x.setType( GT ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LE a b) ) => (GT a b) + case LIKE: + x.setType( NOT_LIKE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (LIKE a b) ) => (NOT_LIKE a b) + case NOT_LIKE: + x.setType( LIKE ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NOT_LIKE a b) ) => (LIKE a b) + case IN: + x.setType( NOT_IN ); + x.setText( "{not}" + x.getText() ); + return x; + case NOT_IN: + x.setType( IN ); + x.setText( "{not}" + x.getText() ); + return x; + case IS_NULL: + x.setType( IS_NOT_NULL ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (IS_NULL a b) ) => (IS_NOT_NULL a b) + case IS_NOT_NULL: + x.setType( IS_NULL ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (IS_NOT_NULL a b) ) => (IS_NULL a b) + case BETWEEN: + x.setType( NOT_BETWEEN ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (BETWEEN a b) ) => (NOT_BETWEEN a b) + case NOT_BETWEEN: + x.setType( BETWEEN ); + x.setText( "{not}" + x.getText() ); + return x; // (NOT (NOT_BETWEEN a b) ) => (BETWEEN a b) +/* This can never happen because this rule will always eliminate the child NOT. + case NOT: + return x.getFirstChild(); // (NOT (NOT x) ) => (x) +*/ + default: + return super.negateNode( x ); // Just add a 'not' parent. + } + } + + /** + * Post process equality expressions, clean up the subtree. + * + * @param x The equality expression. + * @return AST - The clean sub-tree. + */ + public AST processEqualityExpression(AST x) { + if ( x == null ) { + log.warn( "processEqualityExpression() : No expression to process!" ); + return null; + } + + int type = x.getType(); + if ( type == EQ || type == NE ) { + boolean negated = type == NE; + if ( x.getNumberOfChildren() == 2 ) { + AST a = x.getFirstChild(); + AST b = a.getNextSibling(); + // (EQ NULL b) => (IS_NULL b) + if ( a.getType() == NULL && b.getType() != NULL ) { + return createIsNullParent( b, negated ); + } + // (EQ a NULL) => (IS_NULL a) + else if ( b.getType() == NULL && a.getType() != NULL ) { + return createIsNullParent( a, negated ); + } + else if ( b.getType() == EMPTY ) { + return processIsEmpty( a, negated ); + } + else { + return x; + } + } + else { + return x; + } + } + else { + return x; + } + } + + private AST createIsNullParent(AST node, boolean negated) { + node.setNextSibling( null ); + int type = negated ? IS_NOT_NULL : IS_NULL; + String text = negated ? 
"is not null" : "is null"; + return ASTUtil.createParent( astFactory, type, text, node ); + } + + private AST processIsEmpty(AST node, boolean negated) { + node.setNextSibling( null ); + // NOTE: Because we're using ASTUtil.createParent(), the tree must be created from the bottom up. + // IS EMPTY x => (EXISTS (QUERY (SELECT_FROM (FROM x) ) ) ) + AST ast = createSubquery( node ); + ast = ASTUtil.createParent( astFactory, EXISTS, "exists", ast ); + // Add NOT if it's negated. + if ( !negated ) { + ast = ASTUtil.createParent( astFactory, NOT, "not", ast ); + } + return ast; + } + + private AST createSubquery(AST node) { + AST ast = ASTUtil.createParent( astFactory, RANGE, "RANGE", node ); + ast = ASTUtil.createParent( astFactory, FROM, "from", ast ); + ast = ASTUtil.createParent( astFactory, SELECT_FROM, "SELECT_FROM", ast ); + ast = ASTUtil.createParent( astFactory, QUERY, "QUERY", ast ); + return ast; + } + + public void weakKeywords() throws TokenStreamException { + int t = LA( 1 ); + switch ( t ) { + case ORDER: + case GROUP: + // Case 1: Multi token keywords GROUP BY and ORDER BY + // The next token ( LT(2) ) should be 'by'... otherwise, this is just an ident. + if ( LA( 2 ) != LITERAL_by ) { + LT( 1 ).setType( IDENT ); + if ( log.isDebugEnabled() ) { + log.debug( "weakKeywords() : new LT(1) token - " + LT( 1 ) ); + } + } + break; + default: + // Case 2: The current token is after FROM and before '.'. + if (LA(0) == FROM && t != IDENT && LA(2) == DOT) { + HqlToken hqlToken = (HqlToken)LT(1); + if (hqlToken.isPossibleID()) { + hqlToken.setType(IDENT); + if ( log.isDebugEnabled() ) { + log.debug( "weakKeywords() : new LT(1) token - " + LT( 1 ) ); + } + } + } + break; + } + } + + public void processMemberOf(Token n, AST p, ASTPair currentAST) { + AST inAst = n == null ? astFactory.create( IN, "in" ) : astFactory.create( NOT_IN, "not in" ); + astFactory.makeASTRoot( currentAST, inAst ); + AST ast = createSubquery( p ); + ast = ASTUtil.createParent( astFactory, IN_LIST, "inList", ast ); + inAst.addChild( ast ); + } + +} Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/ParseLexer.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/ParseLexer.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/ParseLexer.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,53 @@ +package org.hibernate.hql.ast.parse; + +import org.hibernate.hql.antlr.GeneratedParseLexer; +import org.hibernate.hql.ast.HqlToken; +import org.hibernate.QueryException; + +import java.io.InputStream; +import java.io.Reader; + +import antlr.Token; + +/** + * The HQL parse lexer + * + * @author Steve Ebersole + */ +public class ParseLexer extends GeneratedParseLexer { + private boolean possibleID = false; + + public ParseLexer(InputStream in) { + super( in ); + } + + public ParseLexer(Reader in) { + super( in ); + } + + public void setTokenObjectClass(String cl) { + // Ignore the token class name parameter, and use a specific token class. 
+ super.setTokenObjectClass( HqlToken.class.getName() ); + } + + protected void setPossibleID(boolean possibleID) { + this.possibleID = possibleID; + } + + protected Token makeToken(int i) { + HqlToken token = ( HqlToken ) super.makeToken( i ); + token.setPossibleID( possibleID ); + possibleID = false; + return token; + } + + public void panic() { + //overriden to avoid System.exit + panic( "CharScanner: panic" ); + } + + public void panic(String s) { + //overriden to avoid System.exit + throw new QueryException( s ); + } +} Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/PathCollector.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/PathCollector.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/parse/PathCollector.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,34 @@ +package org.hibernate.hql.ast.parse; + +import org.hibernate.hql.ast.util.NodeTraverser; +import org.hibernate.hql.antlr.ParseTokenTypes; +import antlr.collections.AST; + +/** + * Utilizes a NodeTraverser in order to collect a path from + * a (expecting dot-structure) an AST. + * + * @author Steve Ebersole + */ +public class PathCollector implements NodeTraverser.VisitationStrategy, ParseTokenTypes { + private String path = ""; + + public void visit(AST node) { + if ( node.getType() != DOT ) { + path += "." + node.getText(); + } + } + + private PathCollector() { + } + + public static String getPath(AST dotStructure) { + if ( dotStructure.getType() == IDENT ) { + return dotStructure.getText(); + } + PathCollector collector = new PathCollector(); + NodeTraverser walker = new NodeTraverser( collector ); + walker.traverseDepthFirst( dotStructure ); + return collector.path.substring( 1 ); + } +} Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/AbstractImplicitJoinContext.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/AbstractImplicitJoinContext.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/AbstractImplicitJoinContext.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,49 @@ +package org.hibernate.hql.ast.resolve; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * Provides basic templating around how the two {@link org.hibernate.hql.ast.resolve.ImplicitJoinContext} + * method calls need to be interpreted and handled in different scenarios. 
+ * + * @author Steve Ebersole + */ +public abstract class AbstractImplicitJoinContext implements ImplicitJoinContext { + + public static final Log log = LogFactory.getLog( AbstractImplicitJoinContext.class ); + + private PropertyPathPart prior; + + protected abstract PropertyPathPart handleRoot(String rootPathPart); + protected abstract PropertyReference handleRootAsTerminus(String pathPart); + + public final void handleIntermediatePathPart(String pathPart) { + if ( prior == null ) { + prior = handleRoot( pathPart ); + } + else { + prior = prior.handleIntermediatePathPart( pathPart ); + } + } + + public final PropertyReference handleTerminalPathPart(String pathPart) { + try { + if ( prior == null ) { + return handleRootAsTerminus( pathPart ); + } + else { + return prior.handleTerminalPathPart( pathPart ); + } + } + finally { + // clear our processing state in preparation for any future path expression + prior = null; + } + } + + protected static interface PropertyPathPart { + public PropertyPathPart handleIntermediatePathPart(String name); + public PropertyReference handleTerminalPathPart(String name); + } +} Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/CollectionPersisterReference.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/CollectionPersisterReference.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/CollectionPersisterReference.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -12,9 +12,8 @@ /** * @author Steve Ebersole */ -public class CollectionPersisterReference extends EntityPersisterReference implements ResolverAware { +public class CollectionPersisterReference extends EntityPersisterReference { - private HqlResolver resolver; private String role; private String alias; private QueryableCollection persister; @@ -26,7 +25,7 @@ public QueryableCollection getCollectionPersister() { if ( persister == null ) { - persister = ( QueryableCollection ) resolver.getSessionFactory().getCollectionPersister( role ); + persister = ( QueryableCollection ) getSessionFactory().getCollectionPersister( role ); } return persister; } @@ -34,7 +33,7 @@ public Queryable getEntityPersister() { if ( getCollectionPersister().getElementType().isEntityType() ) { EntityType elementEntityType = ( EntityType ) getCollectionPersister().getElementType(); - return ( Queryable ) elementEntityType.getAssociatedJoinable( resolver.getSessionFactory() ); + return ( Queryable ) elementEntityType.getAssociatedJoinable( getSessionFactory() ); } else { throw new QueryException( "not a collection of entities" ); @@ -65,7 +64,7 @@ EntityType elementEntityType = ( EntityType ) elementType; try { Queryable elementEntityPersister = ( Queryable ) elementEntityType - .getAssociatedJoinable( resolver.getSessionFactory() ); + .getAssociatedJoinable( getSessionFactory() ); return elementEntityPersister.getPropertyType( propertyName ); } catch( Throwable t ) { @@ -97,12 +96,9 @@ return false; } - public void setHqlResolver(HqlResolver resolver) { - this.resolver = resolver; - } - public String toString() { return "CollectionPersisterReference {role=" + getName() + ", alias=" + getAlias() + ", element-type=" + getCollectionPersister().getElementType() + "}"; } -} + +} \ No newline at end of file Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/EntityPersisterReference.java 
=================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/EntityPersisterReference.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/EntityPersisterReference.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -3,26 +3,29 @@ import org.hibernate.type.AssociationType; import org.hibernate.type.Type; import org.hibernate.persister.entity.Queryable; +import org.hibernate.engine.SessionFactoryImplementor; +import org.hibernate.hql.ast.tree.SessionFactoryAwareNode; /** * @author Steve Ebersole */ -public class EntityPersisterReference extends PersisterReference implements ResolverAware { +public class EntityPersisterReference extends PersisterReference implements SessionFactoryAwareNode { private String entityName; private String alias; + private boolean propertyFetch; - private HqlResolver resolver; - + private SessionFactoryImplementor sessionFactory; private transient Queryable persister; - public void initialize(String entityName, String alias) { + public void initialize(String entityName, String alias, boolean propertyFetch) { this.entityName = entityName; this.alias = alias; + this.propertyFetch = propertyFetch; } public Queryable getEntityPersister() { if ( persister == null ) { - persister = ( Queryable ) resolver.getSessionFactory().getEntityPersister( entityName ); + persister = ( Queryable ) sessionFactory.getEntityPersister( entityName ); } return persister; } @@ -35,6 +38,10 @@ return alias; } + public boolean isPropertyFetch() { + return propertyFetch; + } + public AssociationType getPersisterType() { return ( AssociationType ) getEntityPersister().getType(); } @@ -56,7 +63,11 @@ return "EntityPersisterReference {entity-name=" + entityName + ", alias=" + alias + "}"; } - public void setHqlResolver(HqlResolver resolver) { - this.resolver = resolver; + public void setSessionFactory(SessionFactoryImplementor sessionFactory) { + this.sessionFactory = sessionFactory; } + + protected SessionFactoryImplementor getSessionFactory() { + return sessionFactory; + } } Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/FromClauseImplicitJoinContext.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/FromClauseImplicitJoinContext.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/FromClauseImplicitJoinContext.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,98 @@ +package org.hibernate.hql.ast.resolve; + +import org.hibernate.QueryException; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * todo: describe FromClauseImplicitJoinContext + * + * @author Steve Ebersole + */ +public class FromClauseImplicitJoinContext extends AbstractImplicitJoinContext { + + public static final Log log = LogFactory.getLog( FromClauseImplicitJoinContext.class ); + + private final PersisterReferenceContext persisterReferenceContext; + private final PersisterReferenceBuilder persisterReferenceBuilder; + + private final JoinType joinType; + private final String alias; + private final boolean fetching; + private final boolean propertyFetching; + + public FromClauseImplicitJoinContext( + PersisterReferenceContext persisterReferenceContext, + PersisterReferenceBuilder persisterReferenceBuilder, + JoinType joinType, + String alias, + boolean fetching, + boolean 
propertyFetching) { + this.persisterReferenceContext = persisterReferenceContext; + this.persisterReferenceBuilder = persisterReferenceBuilder; + this.joinType = joinType; + this.alias = alias; + this.fetching = fetching; + this.propertyFetching = propertyFetching; + } + + protected PropertyPathPart handleRoot(String rootPathPart) { + log.debug( "attempting to resolve [" + rootPathPart + "] as alias" ); + PersisterReference ref = persisterReferenceContext.locatePersisterReferenceByAlias( rootPathPart ); + if ( ref == null ) { + log.debug( "attempting to resolve [" + rootPathPart + "] as unqualified property reference" ); + ref = persisterReferenceContext.locatePersisterReferenceExposingProperty( rootPathPart ); + if ( ref == null ) { + throw new QueryException( "unable to resolve path expression root [" + rootPathPart + "]" ); + } + else { + ref = ( PersisterReference ) persisterReferenceBuilder + .buildPropertyJoin( ref, rootPathPart, joinType, null, fetching, false ) + .getFirstChild(); + } + } + return new PathPart( ref ); + } + + protected PropertyReference handleRootAsTerminus(String pathPart) { + // this should only ever mean that we have a simple unqualified property reference + log.debug( "attempting to resolve [" + pathPart + "] as unqualified property reference" ); + PersisterReference ref = persisterReferenceContext.locatePersisterReferenceExposingProperty( pathPart ); + if ( ref == null ) { + throw new QueryException( "unable to resolve unqualified property reference [" + pathPart + "]" ); + } + persisterReferenceBuilder.buildPropertyJoin( ref, pathPart, joinType, alias, fetching, propertyFetching ); + + // for joins in the from clause, we dont care about the property ref... + return null; + } + + private class PathPart implements PropertyPathPart { + private final PersisterReference persisterReference; + + public PathPart(PersisterReference persisterReference) { + this.persisterReference = persisterReference; + } + + public PropertyPathPart handleIntermediatePathPart(String name) { + return new PathPart( ( PersisterReference ) buildJoin( name, null, false ).getFirstChild() ); + } + + public PropertyReference handleTerminalPathPart(String name) { + buildJoin( name, alias, propertyFetching ); + // for joins in the from clause, we dont care about the property ref... 
+ return null; + } + + private JoinNode buildJoin(String name, String alias, boolean propertyFetching) { + return persisterReferenceBuilder.buildPropertyJoin( + persisterReference, + name, + joinType, + alias, + fetching, + propertyFetching + ); + } + } +} Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolver.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -1,35 +1,42 @@ package org.hibernate.hql.ast.resolve; -import org.hibernate.hql.antlr.HqlBaseResolver; -import org.hibernate.hql.CollectionProperties; +import org.hibernate.hql.antlr.GeneratedHqlResolver; import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.QueryException; -import org.hibernate.type.Type; -import org.hibernate.type.ComponentType; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import antlr.collections.AST; +import java.util.LinkedList; + /** - * Adds specific semantic action behavior needed to perform useful resolution. + * Actual {@link GeneratedHqlResolver} implementation used in the query translator + * providing semantic action implementation. * * @author Joshua Davis * @author Steve Ebersole */ -public class HqlResolver extends HqlBaseResolver { +public class HqlResolver extends GeneratedHqlResolver implements HqlResolverASTFactory.Context { private static Log log = LogFactory.getLog( HqlResolver.class ); private final SessionFactoryImplementor sessionFactory; + private final PersisterReferenceBuilder persisterReferenceBuilder; + private StatementNode currentStatement; + private ImplicitJoinContextTracker implicitJoinContextTracker = new ImplicitJoinContextTracker(); public HqlResolver(SessionFactoryImplementor sessionFactory) { super(); this.sessionFactory = sessionFactory; setASTFactory( new HqlResolverASTFactory( this ) ); + persisterReferenceBuilder = new PersisterReferenceBuilder( getASTFactory(), sessionFactory ); } + + // HqlResolverASTFactory.Context implementation ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + public SessionFactoryImplementor getSessionFactory() { return sessionFactory; } @@ -38,115 +45,94 @@ return currentStatement; } + + // semantic action implementations ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /** + * Semantic action called whenever we start a new statement (i.e top-level statement vs. subquery) + * + * @param statementNode The statement we are starting to process. + */ protected void pushStatement(AST statementNode) { log.trace( "pushing new statement context : " + currentStatement + " -> " + statementNode ); StatementNode statement = ( StatementNode ) statementNode; + statement.setPersisterReferenceBuilder( persisterReferenceBuilder ); + implicitJoinContextTracker.push( + new NormalImplicitJoinContext( statement, persisterReferenceBuilder, getASTFactory() ) + ); if ( currentStatement != null ) { currentStatement.pushChild( statement ); } currentStatement = statement; } + /** + * Semantic action called whenever we complete processing a statement. 
+ */ protected void popStatement() { log.trace( "popping statement context : " + currentStatement + " -> " + currentStatement.getParentStatement() ); currentStatement = currentStatement.getParentStatement(); + implicitJoinContextTracker.pop(); } - protected AST buildEntityPersisterReference(String entityName, AST alias) { - return currentStatement.buildEntityPersisterReference( entityName, alias == null ? null : alias.getText() ); + protected void pushExplicitJoinContext(AST joinTypeNode, AST fetch, AST alias, AST propertyFetch) { + log.debug( "pushing explicit (from-clause) implicit join context" ); + implicitJoinContextTracker.push( + new FromClauseImplicitJoinContext( + currentStatement, + persisterReferenceBuilder, + resolveJoinType( joinTypeNode ), + textOrNull( alias ), + fetch != null, + propertyFetch != null + ) + ); } - private JoinNode createJoinNode(JoinType type, JoinSource source, boolean fetch, PersisterReference rhs) { - JoinNode node = ( JoinNode ) getASTFactory().create( JOIN, "join" ); - node.initialize( type, source, fetch, rhs ); - rhs.addChild( node ); - return node; + protected void popExplicitJoinContext() { + log.debug( "popping implicit join context" ); + implicitJoinContextTracker.pop(); } /** - * Parser has recognized an explicit join based on a "property join". - * <p/> - * For example, something like: <tt>from Animal a join a.mother</tt> or - * <tt>from Animal a join a.mother.mother as gm</tt> or even - * <tt>from Animal join mother</tt> - * <p/> - * Here, we will need to resolve the explicit join, as well as handle any - * implied joins. - * </p> - * Note that any implicit joins here are handled rather differently than - * implicit joins in any clause other than an explicit join. See - * {@link #resolveCompoundPropertyReference} for the differences. Mainly, the - * differences are:<ul> - * </ul> + * Semantic action called to perform generation of an {@link EntityPersisterReference} + * representing a "root" persister reference. * - * @param propertyPath The property(s) being joined to, which may indicate - * additional implicit joins are intended. - * @param aliasNode The alias to be applied to the generated persister reference - * @param joinTypeNode The type of join indicated - * @param fetchNode Was relation fetching specified? - * @param propertyFetchNode Was property fetching specified? - * @param withClauseNode Any with clause. + * @param entityName The name of the entity. + * @param alias An (optional) alias for later qualification-resolution to the generated + * persister reference. + * @param propertyFetch Was property fetching explicitly specified. + * @return The generated reference. */ - protected void handleExplicitPropertyJoin( - AST propertyPath, - AST aliasNode, - AST joinTypeNode, - AST fetchNode, - AST propertyFetchNode, - AST withClauseNode) { - JoinType joinType = resolveJoinType( joinTypeNode ); - String alias = aliasNode == null ? 
null : aliasNode.getText(); - boolean relationFetch = fetchNode != null; - boolean propertyFetch = propertyFetchNode != null; + protected AST buildEntityPersisterReference(AST entityName, AST alias, AST propertyFetch) { + return buildEntityPersisterReference( entityName.getText(), textOrNull( alias ), propertyFetch != null ); + } - // propertyPath should be a "flattened" dot node structure, which always have at least one child - AST next = null; - String firstPathExpression = propertyPath.getFirstChild().getText(); - EntityPersisterReference root = resolveRootAsAlias( firstPathExpression ); - if ( root != null ) { - // the root of the path expression is an alias referencing a persister reference, - // so the alias itself has essentially been resolved already, so "consume it" - next = propertyPath.getFirstChild().getNextSibling(); - } - else { - // otherwise, the expectation is that the root of the path expression is an - // unqualified property reference, in which case we essentially use the - // root as a look ahead, and now need to actually resolve it - root = resolveRootAsUnqualified( firstPathExpression ); - if ( root == null ) { - throw new QueryException( "unable to determine root of path expression [" + reconstitutePathString( propertyPath ) + "]" ); - } - next = propertyPath.getFirstChild(); - } + private EntityPersisterReference buildEntityPersisterReference(String entityName, String alias, boolean propertyFetching) { +// return currentStatement.buildEntityPersisterReference( entityName, alias, propertyFetching ); + return persisterReferenceBuilder.buildEntityPersisterReference( entityName, alias, propertyFetching ); + } - // At this point, we know the persister which acts as the root or base (root) of the - // path structure (propertyPath), as well as the first actual path expression which - // needs resolving (next). So now, we need to start iterating all the path - // expressions and performing the resolutions. - PersisterReference lhs = root; - JoinSource joinSource = JoinSource.EXPLICIT; - boolean firstPass = true; - while ( next != null ) { - // todo : components? - boolean isLast = next.getNextSibling() == null; + private String textOrNull(AST node) { + return node == null ? null : node.getText(); + } - JoinNode joinNode = createJoinNode( joinType, joinSource, relationFetch, lhs ); - PersisterReference rhs = currentStatement.getPersisterReference( lhs, next.getText(), isLast ? 
alias : null ); - joinNode.addChild( rhs ); + private JoinNode createJoinNode(JoinType type, JoinSource source, boolean fetch, PersisterReference rhs) { + JoinNode node = ( JoinNode ) getASTFactory().create( JOIN, "join" ); + node.initialize( type, source, fetch, rhs ); + rhs.addChild( node ); + return node; + } - if ( isLast && withClauseNode != null ) { - joinNode.addChild( withClauseNode ); - } - if ( firstPass ) { - firstPass = false; - joinSource = JoinSource.IMPLICIT; - } + protected void handleIntermediatePathPart(AST name) { + log.debug( "handling intermediate path part [" + name.getText() + "]" ); + implicitJoinContextTracker.getCurrent().handleIntermediatePathPart( name.getText() ); + } - lhs = rhs; - next = next.getNextSibling(); - } - + protected AST handleTerminalPathPart(AST name) { + log.debug( "handling terminal path part [" + name.getText() + "]" ); + return implicitJoinContextTracker.getCurrent().handleTerminalPathPart( name.getText() ); } protected void handleAdHocJoinNode(AST persisterReference, AST joinType, AST onClause) { @@ -175,23 +161,6 @@ throw new QueryException( "Unrecognized join type [" + joinType.getText() + "]" ); } - public PersisterReference buildPropertyJoin(PersisterReference source, String propertyName, String alias) { - JoinNode join = createJoinNode( JoinType.INNER, JoinSource.IMPLICIT, false, source ); - PersisterReference persisterReference = currentStatement.getPersisterReference( source, propertyName, alias ); - join.addChild( persisterReference ); - return persisterReference; - } - - protected boolean isEntityName(String test) { - try { - return sessionFactory.getEntityPersister( test ) != null; - } - catch( Throwable t ) { - // ignore it... - } - return false; - } - protected String reconstitutePathString(AST propertyReference) { AST child = propertyReference.getFirstChild(); String prefix = ""; @@ -204,265 +173,22 @@ return buffer.toString(); } - protected AST resolveAtomicPropertyReference(AST propertyNode) { - EntityPersisterReference persisterReference = resolveRootAsUnqualified( propertyNode.getText() ); - return generatePropertyReference( persisterReference, propertyNode.getText() ); - } + private class ImplicitJoinContextTracker { + private LinkedList stack = new LinkedList(); - private PropertyReference generatePropertyReference(PersisterReference persisterReference, String propertyName) { - PropertyReference propertyReferenceNode = ( PropertyReference ) getASTFactory() - .create( PROPERTY_REF, persisterReference.getAlias() + "." 
+ propertyName ); - - AST aliasNode = getASTFactory().create( ALIAS, persisterReference.getAlias() ); - propertyReferenceNode.addChild( aliasNode ); - - AST propertyNameNode = getASTFactory().create( IDENT, propertyName ); - propertyReferenceNode.addChild( propertyNameNode ); - - return propertyReferenceNode; - } - - protected AST resolveCompoundPropertyReference(AST dotStructure) { - PersisterReference persisterReference; - AST next; - - // first, resolve root origin - String firstPathExpression = dotStructure.getFirstChild().getText(); - if ( dotStructure.getNumberOfChildren() == 1 ) { - // can only really represent an unqualified simple property ref - EntityPersisterReference match = resolveRootAsUnqualified( firstPathExpression ); - if ( match == null ) { - throw new QueryException( "unable to resolve property [" + firstPathExpression + "] as unqualified reference" ); - } - persisterReference = match; - // the root of the path expression is an alias referencing a persister reference, - // so the alias itself has essentially been resolved already, so "consume it" - next = dotStructure.getFirstChild(); + public ImplicitJoinContextTracker() { } - else { - // need to decide whether 'firstPathExpression' refers to a from-clause alias, or - // an unqualified property name. from-clause alias has a higher precedence - // for matching purposes, so try to resolve as alias first - EntityPersisterReference match = resolveRootAsAlias( firstPathExpression ); - if ( match != null ) { - // the root of the path expression is an alias referencing a persister reference, - // so the alias itself has essentially been resolved already, so "consume it" - next = dotStructure.getFirstChild().getNextSibling(); - } - else { - // then try as unqualified - match = resolveRootAsUnqualified( firstPathExpression ); - if ( match != null ) { - // the root of the path expression is an unqualified property reference, - // in which case we essentially use the root as a look ahead, and now - // need to actually resolve it - next = dotStructure.getFirstChild(); - } - else { - throw new QueryException( "unable to determine root of path expression [" + reconstitutePathString( dotStructure ) + "]" ); - } - } - persisterReference = match; - } - PropertyPathPart propertySource = new PropertyPathRoot( ( EntityPersisterReference ) persisterReference ); - String pathSoFar = persisterReference.getAlias(); - - while ( next != null && next.getNextSibling() != null ) { - String propertyName = next.getText(); - pathSoFar += ( "." 
+ propertyName ); - - PersisterReference built = currentStatement.locatePersisterReferenceByPath( pathSoFar ); - if ( built != null ) { - propertySource = new PropertyPathRoot( ( EntityPersisterReference ) built ); - } - else { - propertySource = propertySource.handleIntermediatePathPart( propertyName ); - } - - next = next.getNextSibling(); + public void push(ImplicitJoinContext context) { + stack.addFirst( context ); } - if ( next == null ) { - throw new QueryException( "illegal parser state" ); + public void pop() { + stack.removeFirst(); } - return propertySource.handleLeafPathPart( next.getText() ); - } - private EntityPersisterReference resolveRootAsAlias(String firstPathExpression) { - return ( EntityPersisterReference ) currentStatement.locatePersisterReferenceByAlias( firstPathExpression ); - } - - private EntityPersisterReference resolveRootAsUnqualified(String firstPathExpression) { - return currentStatement.locatePersisterReferenceExposingProperty( firstPathExpression ); - } - - private PropertyPathPart determineAppropriatePartType(EntityPersisterReference origin, String propertyName) { - Type propertyType = origin.getPropertyType( propertyName ); - if ( propertyType.isComponentType() ) { - return new ComponentPropertyReference( origin, propertyName, ( ComponentType ) propertyType ); + public ImplicitJoinContext getCurrent() { + return ( ImplicitJoinContext ) stack.getFirst(); } - else if ( propertyType.isEntityType() ) { - return new EntityPropertyReference( origin, propertyName, false ); - } - else if ( propertyType.isCollectionType() ) { - return new CollectionPropertyReference( origin, propertyName ); - } - else { - return new SimplePropertyReference( origin, propertyName ); - } } - - private int locateComponentPropertyIndex(ComponentType componentType, String subPropertyName) { - String[] componentPropertyNames = componentType.getPropertyNames(); - for ( int i = 0; i < componentPropertyNames.length; i++ ) { - if ( componentPropertyNames[i].equals( subPropertyName ) ) { - return i; - } - } - throw new QueryException( "could not locate component property [" + subPropertyName + "]" ); - } - - // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - // todo : this stuff needs to be finished up. - // specific things needing to be completed: - // (1) only dereference an actual entity association property into a - // join when it is further dereferenced (already handled) or when - // occuring in select clause - // (2) properly handle "collection properties" - // (3) properly handle components - // (4) properly handle index operations - // - // todo : look at utilizing this from handleExplicitPropertyJoin(...) also - - /** - * Contract for handling delegation for a particular portion of an implicit - * join structure (aka property path). 
- */ - private interface PropertyPathPart { - public PropertyPathPart handleIntermediatePathPart(String name); - public PropertyReference handleLeafPathPart(String name); - } - - private class PropertyPathRoot implements PropertyPathPart { - private final EntityPersisterReference persisterReference; - - public PropertyPathRoot(EntityPersisterReference persisterReference) { - this.persisterReference = persisterReference; - } - - public PropertyPathPart handleIntermediatePathPart(String name) { - return determineAppropriatePartType( persisterReference, name ); - } - - public PropertyReference handleLeafPathPart(String name) { - // todo : this really needs to consider whether a join might be needed - // based on the property type and type of clause - return generatePropertyReference( persisterReference, name ); - } - - } - - private class SimplePropertyReference implements PropertyPathPart { - private final EntityPersisterReference origin; - private final String propertyName; - - public SimplePropertyReference(EntityPersisterReference origin, String propertyName) { - this.origin = origin; - this.propertyName = propertyName; - } - - public PropertyPathPart handleIntermediatePathPart(String name) { - throw new QueryException( "cannot perform implicit join based on simple property" ); - } - - public PropertyReference handleLeafPathPart(String name) { - throw new QueryException( "cannot perform implicit join based on simple property" ); - } - } - - private class ComponentPropertyReference implements PropertyPathPart { - private final EntityPersisterReference origin; - private final String componentPropertyName; - private final ComponentType componentType; - - public ComponentPropertyReference(EntityPersisterReference origin, String componentPropertyName) { - this( origin, componentPropertyName, ( ComponentType ) origin.getPropertyType( componentPropertyName ) ); - } - - public ComponentPropertyReference(EntityPersisterReference origin, String componentPropertyName, ComponentType componentType) { - this.origin = origin; - this.componentPropertyName = componentPropertyName; - this.componentType = componentType; - } - - public PropertyPathPart handleIntermediatePathPart(String propertyName) { - int index = locateComponentPropertyIndex( componentType, propertyName ); - String path = buildDerefPath( propertyName ); - Type propertyType = componentType.getSubtypes()[index]; - if ( propertyType.isComponentType() ) { - return new ComponentPropertyReference( origin, path, ( ComponentType ) propertyType ); - } - else if ( propertyType.isEntityType() ) { - return new EntityPropertyReference( origin, path, false ); - } - else { - return new SimplePropertyReference( origin, path ); - } - } - - public PropertyReference handleLeafPathPart(String name) { - return generatePropertyReference( origin, buildDerefPath( name ) ); - } - - private String buildDerefPath(String subPropertyName) { - return componentPropertyName + "." 
+ subPropertyName; - } - } - - private class EntityPropertyReference implements PropertyPathPart { - private final EntityPersisterReference origin; - private final String propertyName; - - private boolean joined; - - public EntityPropertyReference(EntityPersisterReference origin, String propertyName, boolean joined) { - this.origin = origin; - this.propertyName = propertyName; - this.joined = joined; - } - - public PropertyPathPart handleIntermediatePathPart(String name) { - EntityPersisterReference joinedPersister = ( EntityPersisterReference ) buildPropertyJoin( origin, propertyName, null ); - return determineAppropriatePartType( joinedPersister, name ); - } - - public PropertyReference handleLeafPathPart(String name) { - // not always needed (i.e. : .id) - EntityPersisterReference joinedPersister = ( EntityPersisterReference ) buildPropertyJoin( origin, propertyName, null ); - return generatePropertyReference( joinedPersister, name ); - } - } - - private class CollectionPropertyReference implements PropertyPathPart { - private final EntityPersisterReference origin; - private final String collectionPropertyName; - - public CollectionPropertyReference(EntityPersisterReference origin, String collectionPropertyName) { - this.origin = origin; - this.collectionPropertyName = collectionPropertyName; - } - - public PropertyPathPart handleIntermediatePathPart(String name) { - throw new QueryException( "illegal attempt to perform implicit join across collection property" ); - } - - public PropertyReference handleLeafPathPart(String name) { - if ( CollectionProperties.isAnyCollectionProperty( name ) ) { - CollectionPersisterReference joinedPersister = ( CollectionPersisterReference ) buildPropertyJoin( origin, collectionPropertyName, null ); - return generatePropertyReference( joinedPersister, name ); - } - throw new QueryException( "illegal attempt to perform implicit join across collection property" ); - } - } } Modified: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolverASTFactory.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolverASTFactory.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/HqlResolverASTFactory.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -1,24 +1,33 @@ package org.hibernate.hql.ast.resolve; -import org.hibernate.hql.antlr.HqlRTokenTypes; +import org.hibernate.hql.antlr.ResolveTokenTypes; import org.hibernate.hql.ast.HqlASTFactory; +import org.hibernate.hql.ast.tree.SessionFactoryAwareNode; +import org.hibernate.hql.ast.tree.PersisterReferenceContextAwareNode; +import org.hibernate.hql.ast.tree.ASTFactoryAwareNode; import org.hibernate.engine.SessionFactoryImplementor; import antlr.collections.AST; import antlr.Token; /** * AST factory for the resolver phase. 
- * <br>User: Joshua Davis - * Date: Apr 3, 2006 - * Time: 7:58:16 AM + * + * @author Joshua Davis + * @author Steve Ebersole */ -public class HqlResolverASTFactory extends HqlASTFactory implements HqlRTokenTypes { - private final HqlResolver resolver; +public class HqlResolverASTFactory extends HqlASTFactory implements ResolveTokenTypes { - public HqlResolverASTFactory(HqlResolver resolver) { - this.resolver = resolver; + public static interface Context { + public SessionFactoryImplementor getSessionFactory(); + public PersisterReferenceContext getPersisterReferenceContext(); } + private final Context context; + + public HqlResolverASTFactory(Context context) { + this.context = context; + } + public Class getASTNodeType(int tokenType) { // Statement nodes: switch (tokenType) { @@ -55,11 +64,14 @@ } private void prepare(AST node) { - if ( node instanceof SessionFactoryAwareNode ) { - ( ( SessionFactoryAwareNode ) node ).setSessionFactory( resolver.getSessionFactory() ); + if ( node instanceof org.hibernate.hql.ast.tree.SessionFactoryAwareNode ) { + ( ( SessionFactoryAwareNode ) node ).setSessionFactory( context.getSessionFactory() ); } - if ( node instanceof ResolverAware ) { - ( ( ResolverAware ) node ).setHqlResolver( resolver ); + if ( node instanceof PersisterReferenceContextAwareNode ) { + ( ( PersisterReferenceContextAwareNode ) node ).setPersisterReferenceContext( context.getPersisterReferenceContext() ); } + if ( node instanceof ASTFactoryAwareNode ) { + ( ( ASTFactoryAwareNode ) node ).setASTFactory( this ); + } } } Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/ImplicitJoinContext.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/ImplicitJoinContext.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/ImplicitJoinContext.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,11 @@ +package org.hibernate.hql.ast.resolve; + +/** + * Contract for how implicit joins are handled. + * + * @author Steve Ebersole + */ +public interface ImplicitJoinContext { + public void handleIntermediatePathPart(String pathPart); + public PropertyReference handleTerminalPathPart(String pathPart); +} Added: branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/NormalImplicitJoinContext.java =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/NormalImplicitJoinContext.java 2006-06-30 05:54:36 UTC (rev 10069) +++ branches/HQL_ANTLR_2/Hibernate3/src/org/hibernate/hql/ast/resolve/NormalImplicitJoinContext.java 2006-06-30 05:55:55 UTC (rev 10070) @@ -0,0 +1,236 @@ +package org.hibernate.hql.ast.resolve; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.hibernate.hql.antlr.ResolveTokenTypes; +import org.hibernate.hql.CollectionProperties; +import org.hibernate.QueryException; +import org.hibernate.type.Type; +import org.hibernate.type.ComponentType; +import antlr.ASTFactory; +import antlr.collections.AST; + +/** + * Defines the behavior of how implicit joins are normally handled. + * <p/> + * All other implementations of {@link org.hibernate.hql.ast.resolve.ImplicitJoinContext} are + * considered special cases. 
+ * + * @author Steve Ebersole + */ +public class NormalImplicitJoinContext extends AbstractImplicitJoinContext { + + public static final Log log = LogFactory.getLog( NormalImplicitJoinContext.class ); + + private final PersisterReferenceContext persisterReferenceContext; + private final PersisterReferenceBuilder persisterReferenceBuilder; + private final ASTFactory astFactory; + + private PropertyPathPart prior; + + public NormalImplicitJoinContext( + PersisterReferenceContext persisterReferenceContext, + PersisterReferenceBuilder persisterReferenceBuilder, + ASTFactory astFactory) { + this.persisterReferenceContext = persisterReferenceContext; + this.persisterReferenceBuilder = persisterReferenceBuilder; + this.astFactory = astFactory; + } + + protected PropertyPathPart handleRoot(String pathPart) { + PersisterReference persisterReference = resolveAsAlias( pathPart ); + if ( persisterReference != null ) { + return new PropertyPathRoot( ( EntityPersisterReference ) persisterReference ); + } + + persisterReference = resolveAsUnqualified( pathPart ); + if ( persisterReference != null ) { + return new EntityPropertyReference( ( EntityPersisterReference ) persisterReference, pathPart, false ); + } + + throw new QueryException( "unable to resolve path expression root [" + pathPart + "]" ); + } + + protected PropertyReference handleRootAsTerminus(String pathPart) { + PersisterReference ref = resolveAsUnqualified( pathPart ); + return generatePropertyReference( ref, pathPart ); + } + + + + + private PropertyReference generatePropertyReference(PersisterReference persisterReference, String propertyName) { + PropertyReference propertyReferenceNode = ( PropertyReference ) astFactory.create( ResolveTokenTypes.PROPERTY_REF, persisterReference.getAlias() + "." + propertyName ); + + AST aliasNode = astFactory.create( ResolveTokenTypes.ALIAS, persisterReference.getAlias() ); + propertyReferenceNode.addChild( aliasNode ); + + AST propertyNameNode = astFactory.create( ResolveTokenTypes.IDENT, propertyName ); + propertyReferenceNode.addChild( propertyNameNode ); + + return propertyReferenceNode; + } + + private EntityPersisterReference resolveAsAlias(String name) { + return ( EntityPersisterReference ) persisterReferenceContext.locatePersisterReferenceByAlias( name ); + } + + private EntityPersisterReference resolveAsUnqualified(String firstPathExpression) { + return persisterReferenceContext.locatePersisterReferenceExposingProperty( firstPathExpression ); + } + + private PropertyPathPart determineAppropriatePartType(EntityPersisterReference origin, String propertyName) { + Type propertyType = origin.getPropertyType( propertyName ); + if ( propertyType.isComponentType() ) { + return new ComponentPropertyReference( origin, propertyName, ( ComponentType ) propertyType ); + } + else if ( propertyType.isEntityType() ) { + return new EntityPropertyReference( origin, propertyName, false ); + } + else if ( propertyType.isCollectionType() ) { + return new CollectionPropertyReference( origin, propertyName ); + } + else { + return new SimplePropertyReference( origin, propertyName ); + } + } + + private int locateComponentPropertyIndex(ComponentType componentType, String subPropertyName) { + String[] componentPropertyNames = componentType.getPropertyNames(); + for ( int i = 0; i < componentPropertyNames.length; i++ ) { + if ( componentPropertyNames[i].equals( subPropertyName ) ) { + return i; + } + } + throw new QueryException( "could not locate component property [" + subPropertyName + "]" ); + } + + private class 
PropertyPathRoot implements PropertyPathPart { + private final EntityPersisterReference persisterReference; + + public PropertyPathRoot(EntityPersisterReference persisterReference) { + this.persisterReference = persisterReference; + } + + public PropertyPathPart handleIntermediatePathPart(String name) { + return determineAppropriatePartType( persisterReference, name ); + } + + public PropertyReference handleTerminalPathPart(String name) { + // todo : this really needs to consider whether a join might be needed + // based on the property type and type of clause + return generatePropertyReference( persisterReference, name ); + } + + } + + private class SimplePropertyReference implements PropertyPathPart { + private final EntityPersisterReference origin; + private final String propertyName; + + public SimplePropertyReference(EntityPersisterReference origin, String propertyName) { + this.origin = origin; + this.propertyName = propertyName; + } + + public PropertyPathPart handleIntermediatePathPart(String name) { + throw new QueryException( "cannot perform implicit join based on simple property" ); + } + + public PropertyReference handleTerminalPathPart(String name) { + throw new QueryException( "cannot perform implicit join based on simple property" ); + } + } + + private class ComponentPropertyReference implements PropertyPathPart { + private final EntityPersisterReference origin; + private final String componentPropertyName; + private final ComponentType componentType; + + public ComponentPropertyReference(EntityPersisterReference origin, String componentPropertyName) { + this( origin, componentPropertyName, ( ComponentType ) origin.getPropertyType( componentPropertyName ) ); + } + + public ComponentPropertyReference(EntityPersisterReference origin, String componentPropertyName, ComponentType componentType) { + this.origin = origin; + this.compone... [truncated message content] |
From: <hib...@li...> - 2006-06-30 05:54:40
Author: ste...@jb... Date: 2006-06-30 01:54:36 -0400 (Fri, 30 Jun 2006) New Revision: 10069 Added: branches/HQL_ANTLR_2/Hibernate3/g2/ branches/HQL_ANTLR_2/Hibernate3/g2/parse.g branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g Log: redid parse phase and simplified portions of resolve phase Added: branches/HQL_ANTLR_2/Hibernate3/g2/parse.g =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/g2/parse.g 2006-06-28 17:07:06 UTC (rev 10068) +++ branches/HQL_ANTLR_2/Hibernate3/g2/parse.g 2006-06-30 05:54:36 UTC (rev 10069) @@ -0,0 +1,937 @@ +header +{ +// $Id: $ + +package org.hibernate.hql.antlr; + +import org.hibernate.hql.ast.*; +import org.hibernate.hql.ast.util.*; + +} +/** + * An Antlr stream parser for building a syntax AST representing + * an input Hibernate Query Language (HQL) query. + * + * @author Joshua Davis + * @author Steve Ebersole + */ +class GeneratedHqlParser extends Parser; + +options +{ + exportVocab=Parse; + buildAST=true; + k=3; // For 'not like', 'not in', etc. +} + +tokens +{ + // -- HQL Keyword tokens -- + ALL="all"; + ANY="any"; + AND="and"; + AS="as"; + ASCENDING="asc"; + AVG="avg"; + BETWEEN="between"; + CLASS="class"; + COUNT="count"; + DELETE="delete"; + DESCENDING="desc"; + DOT; + DISTINCT="distinct"; + ELEMENTS="elements"; + ESCAPE="escape"; + EXISTS="exists"; + FALSE="false"; + FETCH="fetch"; + FROM="from"; + FULL="full"; + GROUP="group"; + HAVING="having"; + IN="in"; + INDICES="indices"; + INNER="inner"; + INSERT="insert"; + INTO="into"; + IS="is"; + JOIN="join"; + LEFT="left"; + LIKE="like"; + MAX="max"; + MIN="min"; + MINUS_Q="minus"; // i.e. select a from B minus select c from D + NEW="new"; + NOT="not"; + NULL="null"; + OR="or"; + ORDER="order"; + OUTER="outer"; + PROPERTIES="properties"; + RIGHT="right"; + SELECT="select"; + SET="set"; + SOME="some"; + SUM="sum"; + TRUE="true"; + UNION="union"; + UPDATE="update"; + VERSIONED="versioned"; + WHERE="where"; + + // -- SQL tokens -- + // These aren't part of HQL, but the SQL fragment parser uses the HQL lexer, so they need to be declared here. + CASE="case"; + END="end"; + ELSE="else"; + THEN="then"; + WHEN="when"; + ON="on"; + WITH="with"; + + // -- EJBQL tokens -- + BOTH="both"; + EMPTY="empty"; + LEADING="leading"; + MEMBER="member"; + OBJECT="object"; + OF="of"; + TRAILING="trailing"; + + // -- Synthetic token types -- + AGGREGATE; // One of the aggregate functions (e.g. min, max, avg) + ALIAS; + CONSTRUCTOR; + CASE2; + EXPR_LIST; + FILTER_ENTITY; // FROM element injected because of a filter expression (happens during compilation phase 2) + IN_LIST; + INDEX_OP; + IS_NOT_NULL; + IS_NULL; // Unary 'is null' operator. + METHOD_CALL; + NOT_BETWEEN; + NOT_IN; + NOT_LIKE; + ORDER_ELEMENT; + QUERY; + RANGE; + ROW_STAR; + SELECT_FROM; + UNARY_MINUS; + UNARY_PLUS; + VECTOR_EXPR; // ( x, y, z ) + WEIRD_IDENT; // Identifiers that were keywords when they came in. + ENTITY_NAME; + COLLECTION_ROLE; + CLASS_NAME; + + // Literal tokens. + CONSTANT; + NUM_DOUBLE; + NUM_FLOAT; + NUM_LONG; + JAVA_CONSTANT; +} + +{ + /** True if this is a filter query (allow no FROM clause). **/ + private boolean filter = false; + + /** + * Sets the filter flag. + * @param f True for a filter query, false for a normal query. + */ + public void setFilter(boolean f) { + filter = f; + } + + /** + * Returns true if this is a filter query, false if not. + * @return true if this is a filter query, false if not. 
+ */ + public boolean isFilter() { + return filter; + } + + /** + * This method is overriden in the sub class in order to provide the + * 'keyword as identifier' hack. + * @param token The token to retry as an identifier. + * @param ex The exception to throw if it cannot be retried as an identifier. + */ + public AST handleIdentifierError(Token token,RecognitionException ex) throws RecognitionException, TokenStreamException { + // Base implementation: Just re-throw the exception. + throw ex; + } + + /** + * This method looks ahead and converts . <token> into . IDENT when + * appropriate. + */ + public void handleDotIdent() throws TokenStreamException { + } + + /** + * Returns the negated equivalent of the expression. + * @param x The expression to negate. + */ + public AST negateNode(AST x) { + // Just create a 'not' parent for the default behavior. + return ASTUtil.createParent(astFactory, NOT, "not", x); + } + + /** + * Returns the 'cleaned up' version of a comparison operator sub-tree. + * @param x The comparison operator to clean up. + */ + public AST processEqualityExpression(AST x) throws RecognitionException { + return x; + } + + public void weakKeywords() throws TokenStreamException { } + + public void processMemberOf(Token n,AST p,ASTPair currentAST) { } + + public String extractEntityName(AST node) throws RecognitionException { + return node.getText(); + } + + public boolean isJavaConstant() throws RecognitionException, TokenStreamException { + return false; + } + + public boolean isJavaConstant(AST dotStructure) throws RecognitionException, TokenStreamException { + return false; + } + + public String extractJavaConstantReference(AST node) throws RecognitionException { + return node.getText(); + } + + public String extractDynamicInstantiationPojoName(AST node) { + return node.getText(); + } + + public void showAST(AST ast) { + } +} + + +// MAIN RULE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +statement + : ( selectStatement | updateStatement | deleteStatement | insertStatement ) + ; + + +// select statement ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +selectStatement + : queryRule { + #selectStatement = #([QUERY,"query"], #selectStatement); + } + ; + +queryRule + : selectFrom (whereClause)? (groupByClause)? (orderByClause)? + ; + +selectFrom! + : (s:selectClause)? (f:fromClause)? { + // If there was no FROM clause and this is a filter query, create a from clause. Otherwise, throw + // an exception because non-filter queries must have a FROM clause. + if (#f == null) { + if (filter) { + #f = #([FROM,"{filter-implied FROM}"]); + } + else + throw new SemanticException("FROM expected (non-filter queries must contain a FROM clause)"); + } + // Create an artificial token so the 'FROM' can be placed + // before the SELECT in the tree to make tree processing + // simpler. + #selectFrom = #([SELECT_FROM,"SELECT_FROM"],f,s); + } + ; + + +// select clause ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +selectClause + : SELECT^ // NOTE: The '^' after a token causes the corresponding AST node to be the root of the sub-tree. + { weakKeywords(); } // Weak keywords can appear immediately after a SELECT token. + (DISTINCT)? ( selectedPropertiesList | newExpression | selectObject ) + ; + + +// from clause ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +// NOTE: This *must* begin with the "FROM" token, otherwise the sub-query rule will be ambiguous +// with the expression rule. 
+// Also note: after a comma weak keywords are allowed and should be treated as identifiers. +fromClause + : FROM^ { weakKeywords(); } fromRange ( fromJoin | COMMA! { weakKeywords(); } fromRange )* + ; + +fromRange + : fromClassOrOuterQueryPath + | inClassDeclaration + | inCollectionDeclaration + | inCollectionElementsDeclaration + ; + +fromClassOrOuterQueryPath! + : c:path { weakKeywords(); } (a:asAlias)? (p:propertyFetch)? { + String entityName = extractEntityName( #c ); + AST en = #( [ENTITY_NAME, entityName] ); + en.initialize( #c ); + #fromClassOrOuterQueryPath = #([RANGE, "RANGE"], [ENTITY_NAME, entityName], #a, #p); + } + ; + +inClassDeclaration! + : a:alias IN! CLASS! c:path { + String entityName = extractEntityName( #c ); + #inClassDeclaration = #([RANGE, "RANGE"], [ENTITY_NAME, entityName], #a); + } + ; + +inCollectionDeclaration! + : IN! OPEN! p:path CLOSE! a:alias { + #inCollectionDeclaration = #([JOIN, "join"], [INNER, "inner"], #p, #a); + } + ; + +inCollectionElementsDeclaration! + : a:alias IN! ELEMENTS! OPEN! p:path CLOSE! { + #inCollectionElementsDeclaration = #([JOIN, "join"], [INNER, "inner"], #p, #a); + } + ; + +//fromJoin +// : ( ( ( LEFT | RIGHT ) (OUTER)? ) | FULL | INNER )? JOIN^ (FETCH)? path (asAlias)? (propertyFetch)? (withClause)? +// ; +fromJoin! + : (jt:joinType)? j:JOIN (f:FETCH)? p:path (a:asAlias)? (pf:propertyFetch)? (w:withClause)? { + #fromJoin = #( #j, #jt, #f, #a, #pf, #p, #w ); + } + ; + +joinType + : ( ( LEFT | RIGHT ) (OUTER)? ) + | FULL + | INNER + ; + +withClause + : WITH^ logicalExpression + ; + + +// Alias rule - Parses the optional 'as' token and forces an AST identifier node. +asAlias + : (AS!)? alias + ; + +alias + : a:identifier { #a.setType(ALIAS); } + ; + +propertyFetch + : FETCH ALL! PROPERTIES! + ; + + +// update statement ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +updateStatement + : UPDATE^ (VERSIONED)? optionalFromTokenFromClause setClause (whereClause)? + ; + + +// delete statement ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +deleteStatement + : DELETE^ (optionalFromTokenFromClause) (whereClause)? + ; + + +// insert statement ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +insertStatement + // Would be nice if we could abstract the FromClause/FromElement logic + // out such that it could be reused here; something analogous to + // a "table" rule in sql-grammars + : INSERT^ intoClause selectStatement + ; + +union + : queryRule (UNION queryRule)* + ; + + +// clauses ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +optionalFromTokenFromClause! + : (FROM!)? f:path (a:asAlias)? { + #optionalFromTokenFromClause = #( [FROM, "FROM"], #f, #a ); + } + ; + +setClause + : (SET^ assignment (COMMA! assignment)*) + ; + +assignment + : stateField EQ^ newValue + ; + +// "state_field" is the term used in the EJB3 sample grammar; used here for easy reference. +// it is basically a property ref +stateField + : path + ; + +newValue + : concatenation + ; + +intoClause + : INTO^ path { weakKeywords(); } insertablePropertySpec + ; + +insertablePropertySpec + : OPEN! primaryExpression ( COMMA! primaryExpression )* CLOSE! { + // Just need *something* to distinguish this on the hql-sql.g side + #insertablePropertySpec = #([RANGE, "column-spec"], #insertablePropertySpec); + } + ; + + +//newExpression +// : (NEW! path) op:OPEN^ {#op.setType(CONSTRUCTOR);} selectedPropertiesList CLOSE! +// ; +newExpression! + : ( NEW! c:path ) OPEN! args:selectedPropertiesList CLOSE! 
{ + String className = extractDynamicInstantiationPojoName( #c ); + AST container = #( [CLASS_NAME, className] ); + #newExpression = #( [CONSTRUCTOR, "new"], #container, #args ); + } + ; + +selectObject + : OBJECT^ OPEN! identifier CLOSE! + ; + +//## groupByClause: +//## GROUP_BY path ( COMMA path )*; + +groupByClause + : GROUP^ + "by"! expression ( COMMA! expression )* + (havingClause)? + ; + +//## orderByClause: +//## ORDER_BY selectedPropertiesList; + +orderByClause + : ORDER^ "by"! orderElement ( COMMA! orderElement )* + ; + +orderElement + : expression ( ascendingOrDescending )? + ; + +ascendingOrDescending + : ( "asc" | "ascending" ) { #ascendingOrDescending.setType(ASCENDING); } + | ( "desc" | "descending") { #ascendingOrDescending.setType(DESCENDING); } + ; + +//## havingClause: +//## HAVING logicalExpression; + +havingClause + : HAVING^ logicalExpression + ; + +//## whereClause: +//## WHERE logicalExpression; + +whereClause + : WHERE^ logicalExpression + ; + +//## selectedPropertiesList: +//## ( path | aggregate ) ( COMMA path | aggregate )*; + +selectedPropertiesList + : aliasedExpression ( COMMA! aliasedExpression )* + ; + +aliasedExpression + : expression ( AS^ identifier )? + ; + +// expressions +// Note that most of these expressions follow the pattern +// thisLevelExpression : +// nextHigherPrecedenceExpression +// (OPERATOR nextHigherPrecedenceExpression)* +// which is a standard recursive definition for a parsing an expression. +// +// Operator precedence in HQL +// lowest --> ( 7) OR +// ( 6) AND, NOT +// ( 5) equality: ==, <>, !=, is +// ( 4) relational: <, <=, >, >=, +// LIKE, NOT LIKE, BETWEEN, NOT BETWEEN, IN, NOT IN +// ( 3) addition and subtraction: +(binary) -(binary) +// ( 2) multiplication: * / %, concatenate: || +// highest --> ( 1) +(unary) -(unary) +// [] () (method call) . (dot -- identifier qualification) +// aggregate function +// () (explicit parenthesis) +// +// Note that the above precedence levels map to the rules below... +// Once you have a precedence chart, writing the appropriate rules as below +// is usually very straightfoward + +logicalExpression + : expression + ; + +// Main expression rule +expression + : logicalOrExpression + ; + +// level 7 - OR +logicalOrExpression + : logicalAndExpression ( OR^ logicalAndExpression )* + ; + +// level 6 - AND, NOT +logicalAndExpression + : negatedExpression ( AND^ negatedExpression )* + ; + +// NOT nodes aren't generated. Instead, the operator in the sub-tree will be +// negated, if possible. Expressions without a NOT parent are passed through. +negatedExpression! +{ weakKeywords(); } // Weak keywords can appear in an expression, so look ahead. + : NOT^ x:negatedExpression { #negatedExpression = negateNode(#x); } + | y:equalityExpression { #negatedExpression = #y; } + ; + +//## OP: EQ | LT | GT | LE | GE | NE | SQL_NE | LIKE; + +// level 5 - EQ, NE +equalityExpression + : x:relationalExpression ( + ( EQ^ + | is:IS^ { #is.setType(EQ); } (NOT! { #is.setType(NE); } )? + | NE^ + | ne:SQL_NE^ { #ne.setType(NE); } + ) y:relationalExpression)* { + // Post process the equality expression to clean up 'is null', etc. + #equalityExpression = processEqualityExpression(#equalityExpression); + } + ; + +// level 4 - LT, GT, LE, GE, LIKE, NOT LIKE, BETWEEN, NOT BETWEEN +// NOTE: The NOT prefix for LIKE and BETWEEN will be represented in the +// token type. When traversing the AST, use the token type, and not the +// token text to interpret the semantics of these nodes. 
+relationalExpression + : concatenation ( + ( ( ( LT^ | GT^ | LE^ | GE^ ) additiveExpression )* ) + // Disable node production for the optional 'not'. + | (n:NOT!)? ( + // Represent the optional NOT prefix using the token type by + // testing 'n' and setting the token type accordingly. + (i:IN^ { + #i.setType( (n == null) ? IN : NOT_IN); + #i.setText( (n == null) ? "in" : "not in"); + } + inList) + | (b:BETWEEN^ { + #b.setType( (n == null) ? BETWEEN : NOT_BETWEEN); + #b.setText( (n == null) ? "between" : "not between"); + } + betweenList ) + | (l:LIKE^ { + #l.setType( (n == null) ? LIKE : NOT_LIKE); + #l.setText( (n == null) ? "like" : "not like"); + } + concatenation likeEscape) + | (MEMBER! OF! p:path! { + processMemberOf(n,#p,currentAST); + } ) ) + ) + ; + +likeEscape + : (ESCAPE^ concatenation)? + ; + +inList + : x:compoundExpr + { #inList = #([IN_LIST,"inList"], #inList); } + ; + +betweenList + : concatenation AND! concatenation + ; + +//level 4 - string concatenation +concatenation + : additiveExpression + ( c:CONCAT^ { #c.setType(EXPR_LIST); #c.setText("concatList"); } + additiveExpression + ( CONCAT! additiveExpression )* + { #concatenation = #([METHOD_CALL, "||"], #([IDENT, "concat"]), #c ); } )? + ; + +// level 3 - binary plus and minus +additiveExpression + : multiplyExpression ( ( PLUS^ | MINUS^ ) multiplyExpression )* + ; + +// level 2 - binary multiply and divide +multiplyExpression + : unaryExpression ( ( STAR^ | DIV^ ) unaryExpression )* + ; + +// level 1 - unary minus, unary plus, not +unaryExpression + : MINUS^ {#MINUS.setType(UNARY_MINUS);} unaryExpression + | PLUS^ {#PLUS.setType(UNARY_PLUS);} unaryExpression + | caseExpression + | quantifiedExpression + | atom + ; + +caseExpression + : CASE^ (whenClause)+ (elseClause)? END! + | CASE^ { #CASE.setType(CASE2); } unaryExpression (altWhenClause)+ (elseClause)? END! + ; + +whenClause + : (WHEN^ logicalExpression THEN! unaryExpression) + ; + +altWhenClause + : (WHEN^ unaryExpression THEN! unaryExpression) + ; + +elseClause + : (ELSE^ unaryExpression) + ; + +quantifiedExpression + : ( SOME^ | EXISTS^ | ALL^ | ANY^ ) + ( identifier | collectionExpr | (OPEN! ( subQuery ) CLOSE!) ) + ; + +// level 0 - expression atom +// ident qualifier ('.' ident ), array index ( [ expr ] ), +// method call ( '.' ident '(' exprList ') ) +atom + : primaryExpression + ( + DOT^ identifier + ( options { greedy=true; } : + ( op:OPEN^ {#op.setType(METHOD_CALL);} exprList CLOSE! ) )? + | lb:OPEN_BRACKET^ {#lb.setType(INDEX_OP);} expression CLOSE_BRACKET! + )* + ; + +primaryExpression + : identPrimary ( options {greedy=true;} : DOT^ "class" )? + | constant + | COLON^ identifier + // TODO: Add parens to the tree so the user can control the operator evaluation order. + | OPEN! (expressionOrVector | subQuery) CLOSE! + | PARAM^ (NUM_INT)? + ; + +// This parses normal expression and a list of expressions separated by commas. If a comma is encountered +// a parent VECTOR_EXPR node will be created for the list. +expressionOrVector! + : e:expression ( v:vectorExpr )? { + // If this is a vector expression, create a parent node for it. + if (#v != null) + #expressionOrVector = #([VECTOR_EXPR,"{vector}"], #e, #v); + else + #expressionOrVector = #e; + } + ; + +vectorExpr + : COMMA! expression (COMMA! expression)* + ; + +// identifier, followed by member refs (dot ident), or method calls. +// NOTE: handleDotIdent() is called immediately after the first IDENT is recognized because +// the method looks a head to find keywords after DOT and turns them into identifiers. 
+identPrimary + : identifier { handleDotIdent(); } + ( options { greedy=true; } : DOT^ ( identifier | ELEMENTS | o:OBJECT { #o.setType(IDENT); } ) )* + ( options { greedy=true; } : ( op:OPEN^ { #op.setType(METHOD_CALL);} exprList CLOSE! ) )? { + if ( isJavaConstant( #identPrimary ) ) { + String constant = extractJavaConstantReference( #identPrimary ); + #identPrimary = #( [JAVA_CONSTANT, constant] ); + } + } + // Also allow special 'aggregate functions' such as count(), avg(), etc. + | aggregate + ; + +//## aggregate: +//## ( aggregateFunction OPEN path CLOSE ) | ( COUNT OPEN STAR CLOSE ) | ( COUNT OPEN (DISTINCT | ALL) path CLOSE ); + +//## aggregateFunction: +//## COUNT | 'sum' | 'avg' | 'max' | 'min'; + +aggregate + : ( SUM^ | AVG^ | MAX^ | MIN^ ) OPEN! additiveExpression CLOSE! { #aggregate.setType(AGGREGATE); } + // Special case for count - It's 'parameters' can be keywords. + | COUNT^ OPEN! ( STAR { #STAR.setType(ROW_STAR); } | ( ( DISTINCT | ALL )? ( path | collectionExpr ) ) ) CLOSE! + | collectionExpr + ; + +//## collection: ( OPEN query CLOSE ) | ( 'elements'|'indices' OPEN path CLOSE ); + +collectionExpr + : (ELEMENTS^ | INDICES^) OPEN! path CLOSE! + ; + +// NOTE: compoundExpr can be a 'path' where the last token in the path is '.elements' or '.indicies' +compoundExpr + : collectionExpr + | path + | (OPEN! ( (expression (COMMA! expression)*) | subQuery ) CLOSE!) + ; + +subQuery + : union + { #subQuery = #([QUERY,"query"], #subQuery); } + ; + +exprList +{ + AST trimSpec = null; +} + : (t:TRAILING {#trimSpec = #t;} | l:LEADING {#trimSpec = #l;} | b:BOTH {#trimSpec = #b;})? + { if(#trimSpec != null) #trimSpec.setType(IDENT); } + ( + expression ( (COMMA! expression)+ | FROM { #FROM.setType(IDENT); } expression | AS! identifier )? + | FROM { #FROM.setType(IDENT); } expression + )? + { #exprList = #([EXPR_LIST,"exprList"], #exprList); } + ; + +constant + : NUM_INT + | NUM_FLOAT + | NUM_LONG + | NUM_DOUBLE + | QUOTED_STRING + | NULL + | TRUE + | FALSE + | EMPTY + ; + +javaConstant! + : c:path { + String constant = extractJavaConstantReference( #c ); + #javaConstant = #( [JAVA_CONSTANT, constant] ); + } + ; + +//## quantifiedExpression: 'exists' | ( expression 'in' ) | ( expression OP 'any' | 'some' ) collection; + +//## compoundPath: path ( OPEN_BRACKET expression CLOSE_BRACKET ( '.' path )? )*; + +//## path: identifier ( '.' identifier )*; + +path + : identifier ( DOT^ { weakKeywords(); } identifier )* + ; + + +// Wraps the IDENT token from the lexer, in order to provide +// 'keyword as identifier' trickery. +identifier + : IDENT + exception + catch [RecognitionException ex] + { + identifier_AST = handleIdentifierError(LT(1),ex); + } + ; + +// **** LEXER ****************************************************************** + +/** + * Hibernate Query Language Lexer, which provides the HQL parser with tokens. + * + * @author Joshua Davis + */ +class GeneratedParseLexer extends Lexer; + +options { + exportVocab=Parse; + testLiterals = false; + k=2; // needed for newline, and to distinguish '>' from '>='. + // HHH-241 : Quoted strings don't allow unicode chars - This should fix it. + charVocabulary='\u0000'..'\uFFFE'; // Allow any char but \uFFFF (16 bit -1, ANTLR's EOF character) + caseSensitive = false; + caseSensitiveLiterals = false; +} + +// -- Declarations -- +{ + // NOTE: The real implementations are in the subclass. 
+ protected void setPossibleID(boolean possibleID) {} +} + +// -- Keywords -- + +EQ: '='; +LT: '<'; +GT: '>'; +SQL_NE: "<>"; +NE: "!=" | "^="; +LE: "<="; +GE: ">="; + +COMMA: ','; + +OPEN: '('; +CLOSE: ')'; +OPEN_BRACKET: '['; +CLOSE_BRACKET: ']'; + +CONCAT: "||"; +PLUS: '+'; +MINUS: '-'; +STAR: '*'; +DIV: '/'; +COLON: ':'; +PARAM: '?'; + +IDENT options { testLiterals=true; } + : ID_START_LETTER ( ID_LETTER )* + { + // Setting this flag allows the grammar to use keywords as identifiers, if necessary. + setPossibleID(true); + } + ; + +protected +ID_START_LETTER + : '_' + | '$' + | 'a'..'z' + | '\u0080'..'\ufffe' // HHH-558 : Allow unicode chars in identifiers + ; + +protected +ID_LETTER + : ID_START_LETTER + | '0'..'9' + ; + +QUOTED_STRING + : '\'' ( (ESCqs)=> ESCqs | ~'\'' )* '\'' + ; + +protected +ESCqs + : + '\'' '\'' + ; + +WS : ( ' ' + | '\t' + | '\r' '\n' { newline(); } + | '\n' { newline(); } + | '\r' { newline(); } + ) + {$setType(Token.SKIP);} //ignore this token + ; + +//--- From the Java example grammar --- +// a numeric literal +NUM_INT + {boolean isDecimal=false; Token t=null;} + : '.' {_ttype = DOT;} + ( ('0'..'9')+ (EXPONENT)? (f1:FLOAT_SUFFIX {t=f1;})? + { + if (t != null && t.getText().toUpperCase().indexOf('F')>=0) + { + _ttype = NUM_FLOAT; + } + else + { + _ttype = NUM_DOUBLE; // assume double + } + } + )? + | ( '0' {isDecimal = true;} // special case for just '0' + ( ('x') + ( // hex + // the 'e'|'E' and float suffix stuff look + // like hex digits, hence the (...)+ doesn't + // know when to stop: ambig. ANTLR resolves + // it correctly by matching immediately. It + // is therefore ok to hush warning. + options { warnWhenFollowAmbig=false; } + : HEX_DIGIT + )+ + | ('0'..'7')+ // octal + )? + | ('1'..'9') ('0'..'9')* {isDecimal=true;} // non-zero decimal + ) + ( ('l') { _ttype = NUM_LONG; } + + // only check to see if it's a float if looks like decimal so far + | {isDecimal}? + ( '.' ('0'..'9')* (EXPONENT)? (f2:FLOAT_SUFFIX {t=f2;})? + | EXPONENT (f3:FLOAT_SUFFIX {t=f3;})? + | f4:FLOAT_SUFFIX {t=f4;} + ) + { + if (t != null && t.getText().toUpperCase() .indexOf('F') >= 0) + { + _ttype = NUM_FLOAT; + } + else + { + _ttype = NUM_DOUBLE; // assume double + } + } + )? + ; + +// hexadecimal digit (again, note it's protected!) +protected +HEX_DIGIT + : ('0'..'9'|'a'..'f') + ; + +// a couple protected methods to assist in matching floating point numbers +protected +EXPONENT + : ('e') ('+'|'-')? ('0'..'9')+ + ; + +protected +FLOAT_SUFFIX + : 'f'|'d' + ; + Added: branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g =================================================================== --- branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g 2006-06-28 17:07:06 UTC (rev 10068) +++ branches/HQL_ANTLR_2/Hibernate3/g2/resolve.g 2006-06-30 05:54:36 UTC (rev 10069) @@ -0,0 +1,382 @@ +header +{ +// $Id:$ +package org.hibernate.hql.antlr; + +import java.util.*; +} + +/** + * An Antlr tree parser for "resolving" or "normalizing" an HQL + * syntax AST. This parser provides the vast majority of the + * semantic analysis of the HQL AST. + * <p/> + * Both "resolving" and "normalizing" here seek a single goal of + * building a dis-ambiguated, generic query AST. + * <p/> + * The act of resolving is essentially the process of simplifying + * complex node structures into atomic components based on contextual + * information (aka, the current parser state). 
The main thrust + * of this process is breaking down dot-structures (a series of + * DOT INDET pairs) into <ul> + * <li>a series of "implicit" join structures injected into the from clause tree</li> + * <li>a simple structure representing the "meaning" of the "leaf" of said dot-structure</li> + * </ul> + * <p/> + * The act of normalizing essentially refers to the process of dis-ambiguating + * node structures based on their context and creating a unified AST + * representation for different ways to express the same "idea". + * + * @author Joshua Davis + * @author Steve Ebersole + */ +class GeneratedHqlResolver extends TreeParser; + +options +{ + importVocab=Parse; + exportVocab=Resolve; + buildAST=true; +} + +tokens +{ + PROPERTY_REF; + ENTITY_PERSISTER_REF; + COLLECTION_PERSISTER_REF; + BOGUS; +} + + +// -- Declarations -- +{ + + // Statement node BEGIN/END handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected void pushStatement(AST statementNode) { + } + + protected void popStatement() { + } + + + // implicit join context pushing/popping ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected void pushExplicitJoinContext(AST joinType, AST fetch, AST alias, AST propertyFetch) { + } + + protected void popExplicitJoinContext() { + } + + // persister reference handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected AST buildEntityPersisterReference(AST entityName, AST alias, AST propertyFetch) { + return null; + } + + protected void handleAdHocJoinNode(AST persisterReference, AST joinType, AST onClause) { + } + + + // property reference handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + protected void handleIntermediatePathPart(AST name) { + } + + protected AST handleTerminalPathPart(AST name) { + return null; + } + +} + +// The main statement rule. +statement + : selectStatement | updateStatement | deleteStatement | insertStatement + ; + +// --- HQL statements --- + +selectStatement + : query + ; + +updateStatement + : #(UPDATE { pushStatement( #updateStatement ); } (VERSIONED)? fromClause setClause (whereClause)? { popStatement(); }) + ; + +deleteStatement + : #(DELETE { pushStatement( #deleteStatement ); } fromClause (whereClause)? { popStatement(); }) + ; + +insertStatement + : #(INSERT { pushStatement( #insertStatement ); } intoClause query { popStatement(); }) + ; + +query + : #(QUERY { pushStatement( #query ); } + // The first phase places the FROM first to make processing the SELECT simpler. + #( SELECT_FROM fromClause (selectClause)? ) + (whereClause)? + (groupClause)? + (orderClause)? { + popStatement(); + } + ) + ; + +// TODO : for now, just copy over the entire subtree +selectClause + : #(SELECT (subtree)* ) + ; + +// -- Language sub-elements -- + + +fromClause + : #( f:FROM range ( explicitJoin | range )* ) + ; + +range! + : #( RANGE e:entityPersisterReference ) { + #range = #e; + } + ; + +entityPersisterReference! + : en:ENTITY_NAME (a:ALIAS)? (pf:FETCH)? { + #entityPersisterReference = buildEntityPersisterReference( en, a, pf ); + } + ; + +explicitJoin! + : #(JOIN (jt:joinType)? joinRhs[jt] ) + ; + +joinRhs! [AST joinType] + : e:entityPersisterReference (on:ON)? { + handleAdHocJoinNode( #e, joinType, on ); + } + | (f:FETCH)? (a:ALIAS)? (pf:FETCH)? { pushExplicitJoinContext( joinType, #f, #a, #pf ); } prop:propertyPath (with:WITH)? { + popExplicitJoinContext(); + } + ; + +// TODO : still need to account for index operators in this series of rules... +propertyPath + : singlePartPropertyPath + | multiPartPropertyPath + ; + +singlePartPropertyPath! 
+ : i:identifier { + #singlePartPropertyPath = handleTerminalPathPart( #i ); + } + ; + +multiPartPropertyPath! + : #( d:DOT lhs:multiPartPropertyPathIntermediateStructure rhs:multiPartPropertyPathTerminus ) { + #multiPartPropertyPath = #rhs; + } + ; + +multiPartPropertyPathIntermediateStructure! + : i:identifier { + // this represents the "root" of the path expression + handleIntermediatePathPart( #i ); + } + | #( d:DOT lhs:multiPartPropertyPathIntermediateStructure rhs:propertyName ) { + handleIntermediatePathPart( #rhs ); + } + ; + +multiPartPropertyPathTerminus! + : p:propertyName { + #multiPartPropertyPathTerminus = handleTerminalPathPart( #p ); + } + ; + +// TODO : need to add cross joins +joinType + : ( (LEFT | RIGHT) (OUTER)? ) + | FULL + | INNER + ; + +intoClause + : #(i:INTO (subtree)* ) + ; + +whereClause + : #(WHERE logicalExpr ) + ; + +groupClause + : #(GROUP (subtree)* ) + ; + +orderClause + : #(ORDER (subtree)* ) + ; + +setClause + : #(SET (subtree)* ) + ; + +logicalExpr + : #(AND logicalExpr logicalExpr) + | #(OR logicalExpr logicalExpr) + | #(NOT logicalExpr) + | comparisonExpr + ; + +comparisonExpr + : + ( #(EQ exprOrSubquery exprOrSubquery) + | #(NE exprOrSubquery exprOrSubquery) + | #(LT exprOrSubquery exprOrSubquery) + | #(GT exprOrSubquery exprOrSubquery) + | #(LE exprOrSubquery exprOrSubquery) + | #(GE exprOrSubquery exprOrSubquery) + | #(LIKE exprOrSubquery expr ( #(ESCAPE expr) )? ) + | #(NOT_LIKE exprOrSubquery expr ( #(ESCAPE expr) )? ) + | #(BETWEEN exprOrSubquery exprOrSubquery exprOrSubquery) + | #(NOT_BETWEEN exprOrSubquery exprOrSubquery exprOrSubquery) + | #(IN exprOrSubquery inRhs ) + | #(NOT_IN exprOrSubquery inRhs ) + | #(IS_NULL exprOrSubquery) + | #(IS_NOT_NULL exprOrSubquery) + | #(EXISTS ( expr | collectionFunctionOrSubselect ) ) + ) + ; + +inRhs + : #(IN_LIST ( collectionFunctionOrSubselect | ( (expr)* ) ) ) + ; + +exprOrSubquery + : expr + | query + | #(ANY collectionFunctionOrSubselect) + | #(ALL collectionFunctionOrSubselect) + | #(SOME collectionFunctionOrSubselect) + ; + +collectionFunctionOrSubselect + : collectionFunction + | query + ; + +collectionFunction + : #( ELEMENTS propertyRef ) + | #( INDICES propertyRef ) + ; + +count + : #(COUNT ( DISTINCT | ALL )? ( aggregateExpr | ROW_STAR ) ) + ; + +aggregateExpr + : expr + | collectionFunction + ; + +expr + : addrExpr + | #( VECTOR_EXPR (expr)* ) + | constant + | arithmeticExpr + | functionCall // Function call, not in the SELECT clause. + | parameter + | count // Count, not in the SELECT clause. + ; + +arithmeticExpr + : #(PLUS expr expr) + | #(MINUS expr expr) + | #(DIV expr expr) + | #(STAR expr expr) + | #(UNARY_MINUS expr) + | caseExpr + ; + +caseExpr + : #(CASE (#(WHEN logicalExpr expr))+ (#(ELSE expr))?) + | #(CASE2 expr (#(WHEN expr expr))+ (#(ELSE expr))?) + ; + +addrExpr + : propertyRef + | #(INDEX_OP addrExprLhs expr) + ; + +addrExprLhs + : addrExpr + ; + +constant + : literal + | NULL + | TRUE + | FALSE + ; + +literal + : NUM_INT + | NUM_LONG + | NUM_FLOAT + | NUM_DOUBLE + | QUOTED_STRING + ; + +parameter + : #(COLON identifier) + | #(PARAM (NUM_INT)?) + ; + +functionCall + : #(METHOD_CALL pathAsIdent ( #(EXPR_LIST (expr)* ) )? ) + | #(AGGREGATE aggregateExpr ) + ; + +propertyRef + : propertyPath + ; + +propertyName + : identifier + | CLASS + | ELEMENTS + | INDICES + ; + +// Matches a path and returns the normalized string for the path (usually +// fully qualified a class name). 
+pathAsString returns [String p] { + p = "???"; + String x = "?x?"; + } + : a:identifier { p = a.getText(); } + | #(DOT x=pathAsString y:identifier) { + StringBuffer buf = new StringBuffer(); + buf.append(x).append(".").append(y.getText()); + p = buf.toString(); + } + ; + +// Returns a path as a single identifier node. +pathAsIdent { + String text = "?text?"; + } + : text=pathAsString { + #pathAsIdent = #([IDENT,text]); + } + ; + +identifier + : (IDENT | WEIRD_IDENT) + ; + +// General subtree. Matches anything, copies the tree verbatim. +subtree + : #(. (subtree)*) + ; \ No newline at end of file |
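Taken together, the two grammars above split HQL processing on this branch into a parse phase (parse.g, which only builds a syntax AST) and a resolve phase (resolve.g, a tree parser that normalizes that AST into a disambiguated form). Below is a minimal driver sketch for the parse phase, assuming the standard ANTLR 2 lexer/parser wiring and the class names declared in the grammar; the concrete parser subclass that supplies handleIdentifierError() and weakKeywords() is not part of this commit, so keyword-as-identifier queries would still fail with this bare setup:

    import java.io.StringReader;
    import antlr.collections.AST;
    import org.hibernate.hql.antlr.GeneratedHqlParser;
    import org.hibernate.hql.antlr.GeneratedParseLexer;

    public class ParsePhaseSketch {
        public static void main(String[] args) throws Exception {
            String hql = "select c from Cat as c where c.name like :name";
            // standard ANTLR 2 wiring: lexer over the source text, parser over the token stream
            GeneratedParseLexer lexer = new GeneratedParseLexer( new StringReader( hql ) );
            GeneratedHqlParser parser = new GeneratedHqlParser( lexer );
            parser.statement();            // main rule from parse.g
            AST ast = parser.getAST();     // root is a QUERY node; SELECT_FROM puts FROM ahead of SELECT
            System.out.println( ast.toStringTree() );
        }
    }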
From: <hib...@li...> - 2006-06-28 17:07:40
Author: ste...@jb... Date: 2006-06-28 13:07:06 -0400 (Wed, 28 Jun 2006) New Revision: 10068 Modified: trunk/Hibernate3/src/org/hibernate/jdbc/JDBCContext.java trunk/Hibernate3/src/org/hibernate/util/JTAHelper.java Log: HHH-1828 : Synchronization registration and rollback-only Modified: trunk/Hibernate3/src/org/hibernate/jdbc/JDBCContext.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/jdbc/JDBCContext.java 2006-06-28 13:24:59 UTC (rev 10067) +++ trunk/Hibernate3/src/org/hibernate/jdbc/JDBCContext.java 2006-06-28 17:07:06 UTC (rev 10068) @@ -18,6 +18,7 @@ import org.hibernate.SessionException; import org.hibernate.Transaction; import org.hibernate.TransactionException; +import org.hibernate.util.JTAHelper; import org.hibernate.engine.SessionFactoryImplementor; import org.hibernate.exception.JDBCExceptionHelper; import org.hibernate.transaction.CacheSynchronization; @@ -161,10 +162,16 @@ } else { javax.transaction.Transaction tx = tm.getTransaction(); - tx.registerSynchronization( new CacheSynchronization(owner, this, tx, null) ); - isTransactionCallbackRegistered = true; - log.debug("successfully registered Synchronization"); - return true; + if ( JTAHelper.isMarkedForRollback( tx ) ) { + log.debug( "Transaction is marked for rollback; skipping Synchronization registration" ); + return false; + } + else { + tx.registerSynchronization( new CacheSynchronization(owner, this, tx, null) ); + isTransactionCallbackRegistered = true; + log.debug("successfully registered Synchronization"); + return true; + } } } catch( HibernateException e ) { Modified: trunk/Hibernate3/src/org/hibernate/util/JTAHelper.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/util/JTAHelper.java 2006-06-28 13:24:59 UTC (rev 10067) +++ trunk/Hibernate3/src/org/hibernate/util/JTAHelper.java 2006-06-28 17:07:06 UTC (rev 10068) @@ -43,4 +43,12 @@ public static boolean isTransactionInProgress(javax.transaction.Transaction tx) throws SystemException { return tx != null && JTAHelper.isInProgress( tx.getStatus() ); } + + public static boolean isMarkedForRollback(int status) { + return status == Status.STATUS_MARKED_ROLLBACK; + } + + public static boolean isMarkedForRollback(javax.transaction.Transaction tx) throws SystemException { + return isMarkedForRollback( tx.getStatus() ); + } } |
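In practice HHH-1828 means that when the JTA transaction has already been marked rollback-only, JDBCContext now skips Synchronization registration instead of attempting it and failing (the JTA spec has registerSynchronization() throw RollbackException in that state). A condensed sketch of the guarded registration, with the TransactionManager lookup and the surrounding exception handling from JDBCContext omitted and variable names following the diff above:

    javax.transaction.Transaction tx = transactionManager.getTransaction();
    if ( JTAHelper.isMarkedForRollback( tx ) ) {
        // a rollback-only transaction can never commit, so the cache/after-completion
        // callback is simply not registered
        return false;
    }
    tx.registerSynchronization( new CacheSynchronization( owner, this, tx, null ) );
    isTransactionCallbackRegistered = true;
    return true;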
Author: ste...@jb... Date: 2006-06-28 09:24:59 -0400 (Wed, 28 Jun 2006) New Revision: 10067 Modified: trunk/Hibernate3/src/org/hibernate/engine/query/HQLQueryPlan.java trunk/Hibernate3/src/org/hibernate/engine/query/NamedParameterDescriptor.java trunk/Hibernate3/src/org/hibernate/engine/query/ParamLocationRecognizer.java trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java trunk/Hibernate3/src/org/hibernate/engine/query/QueryPlanCache.java trunk/Hibernate3/src/org/hibernate/impl/AbstractQueryImpl.java trunk/Hibernate3/src/org/hibernate/loader/custom/sql/SQLQueryParser.java trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java Log: HHH-1552 : jpa-positional-param (?1) and param-list Modified: trunk/Hibernate3/src/org/hibernate/engine/query/HQLQueryPlan.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/engine/query/HQLQueryPlan.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/engine/query/HQLQueryPlan.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -286,18 +286,20 @@ ); } - Iterator itr = recognizer.getNamedParameterLocationMap().entrySet().iterator(); + Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator(); Map namedParamDescriptorMap = new HashMap(); while( itr.hasNext() ) { final Map.Entry entry = ( Map.Entry ) itr.next(); final String name = ( String ) entry.getKey(); - final int[] locArray = ArrayHelper.toIntArray( ( List ) entry.getValue() ); + final ParamLocationRecognizer.NamedParameterDescription description = + ( ParamLocationRecognizer.NamedParameterDescription ) entry.getValue(); namedParamDescriptorMap.put( name, new NamedParameterDescriptor( name, parameterTranslations.getNamedParameterExpectedType( name ), - locArray + description.buildPositionsArray(), + description.isJpaStyle() ) ); } Modified: trunk/Hibernate3/src/org/hibernate/engine/query/NamedParameterDescriptor.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/engine/query/NamedParameterDescriptor.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/engine/query/NamedParameterDescriptor.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -5,17 +5,21 @@ import java.io.Serializable; /** - * @author <a href="mailto:st...@hi...">Steve Ebersole </a> + * Descriptor regarding a named parameter. 
+ * + * @author Steve Ebersole */ public class NamedParameterDescriptor implements Serializable { private final String name; private final Type expectedType; private final int[] sourceLocations; + private final boolean jpaStyle; - public NamedParameterDescriptor(String name, Type expectedType, int[] sourceLocations) { + public NamedParameterDescriptor(String name, Type expectedType, int[] sourceLocations, boolean jpaStyle) { this.name = name; this.expectedType = expectedType; this.sourceLocations = sourceLocations; + this.jpaStyle = jpaStyle; } public String getName() { @@ -29,4 +33,8 @@ public int[] getSourceLocations() { return sourceLocations; } + + public boolean isJpaStyle() { + return jpaStyle; + } } Modified: trunk/Hibernate3/src/org/hibernate/engine/query/ParamLocationRecognizer.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/engine/query/ParamLocationRecognizer.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/engine/query/ParamLocationRecognizer.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -1,5 +1,7 @@ package org.hibernate.engine.query; +import org.hibernate.util.ArrayHelper; + import java.util.Map; import java.util.HashMap; import java.util.List; @@ -9,10 +11,32 @@ * Implements a parameter parser recognizer specifically for the purpose * of journaling parameter locations. * - * @author <a href="mailto:st...@hi...">Steve Ebersole </a> + * @author Steve Ebersole */ public class ParamLocationRecognizer implements ParameterParser.Recognizer { - private Map namedParameterLocationMap = new HashMap(); + + public static class NamedParameterDescription { + private final boolean jpaStyle; + private final List positions = new ArrayList(); + + public NamedParameterDescription(boolean jpaStyle) { + this.jpaStyle = jpaStyle; + } + + public boolean isJpaStyle() { + return jpaStyle; + } + + private void add(int position) { + positions.add( new Integer( position ) ); + } + + public int[] buildPositionsArray() { + return ArrayHelper.toIntArray( positions ); + } + } + + private Map namedParameterDescriptions = new HashMap(); private List ordinalParameterLocationList = new ArrayList(); /** @@ -30,12 +54,12 @@ /** * Returns the map of named parameter locations. The map is keyed by - * parameter name; the corresponding value is an Integer list. + * parameter name; the corresponding value is a (@link NamedParameterDescription}. * * @return The map of named parameter locations. 
*/ - public Map getNamedParameterLocationMap() { - return namedParameterLocationMap; + public Map getNamedParameterDescriptionMap() { + return namedParameterDescriptions; } /** @@ -58,18 +82,22 @@ } public void namedParameter(String name, int position) { - List locations = ( List ) namedParameterLocationMap.get( name ); - if ( locations == null ) { - locations = new ArrayList(); - namedParameterLocationMap.put( name, locations ); - } - locations.add( new Integer( position ) ); + getOrBuildNamedParameterDescription( name, false ).add( position ); } - public void ejb3PositionalParameter(String name, int position) { - namedParameter( name, position ); + public void jpaPositionalParameter(String name, int position) { + getOrBuildNamedParameterDescription( name, true ).add( position ); } + private NamedParameterDescription getOrBuildNamedParameterDescription(String name, boolean jpa) { + NamedParameterDescription desc = ( NamedParameterDescription ) namedParameterDescriptions.get( name ); + if ( desc == null ) { + desc = new NamedParameterDescription( jpa ); + namedParameterDescriptions.put( name, desc ); + } + return desc; + } + public void other(char character) { // don't care... } Modified: trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/engine/query/ParameterParser.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -7,7 +7,7 @@ /** * The single available method {@link #parse} is responsible for parsing a * query string and recognizing tokens in relation to parameters (either - * named, ejb3-style, or ordinal) and providing callbacks about such + * named, JPA-style, or ordinal) and providing callbacks about such * recognitions. * * @author <a href="mailto:st...@hi...">Steve Ebersole </a> @@ -18,7 +18,7 @@ public void outParameter(int position); public void ordinalParameter(int position); public void namedParameter(String name, int position); - public void ejb3PositionalParameter(String name, int position); + public void jpaPositionalParameter(String name, int position); public void other(char character); } @@ -68,9 +68,9 @@ indx = chopLocation - 1; } else if ( c == '?' ) { - // could be either an ordinal or ejb3-positional parameter + // could be either an ordinal or JPA-positional parameter if ( indx < stringLength - 1 && Character.isDigit( sqlString.charAt( indx + 1 ) ) ) { - // a peek ahead showed this as an ejb3-positional parameter + // a peek ahead showed this as an JPA-positional parameter int right = StringHelper.firstIndexOfChar( sqlString, ParserHelper.HQL_SEPARATORS, indx + 1 ); int chopLocation = right < 0 ? 
sqlString.length() : right; String param = sqlString.substring( indx + 1, chopLocation ); @@ -79,9 +79,9 @@ new Integer( param ); } catch( NumberFormatException e ) { - throw new QueryException( "ejb3-style positional param was not an integral ordinal" ); + throw new QueryException( "JPA-style positional param was not an integral ordinal" ); } - recognizer.ejb3PositionalParameter( param, indx ); + recognizer.jpaPositionalParameter( param, indx ); indx = chopLocation - 1; } else { Modified: trunk/Hibernate3/src/org/hibernate/engine/query/QueryPlanCache.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/engine/query/QueryPlanCache.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/engine/query/QueryPlanCache.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -133,15 +133,16 @@ ordinalDescriptors[i] = new OrdinalParameterDescriptor( i, null, position.intValue() ); } - Iterator itr = recognizer.getNamedParameterLocationMap().entrySet().iterator(); + Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator(); Map namedParamDescriptorMap = new HashMap(); while( itr.hasNext() ) { final Map.Entry entry = ( Map.Entry ) itr.next(); final String name = ( String ) entry.getKey(); - final List locationList = ( List ) entry.getValue(); + final ParamLocationRecognizer.NamedParameterDescription description = + ( ParamLocationRecognizer.NamedParameterDescription ) entry.getValue(); namedParamDescriptorMap.put( name , - new NamedParameterDescriptor( name, null, ArrayHelper.toIntArray( locationList ) ) + new NamedParameterDescriptor( name, null, description.buildPositionsArray(), description.isJpaStyle() ) ); } Modified: trunk/Hibernate3/src/org/hibernate/impl/AbstractQueryImpl.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/impl/AbstractQueryImpl.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/impl/AbstractQueryImpl.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -702,16 +702,20 @@ return query; } - StringBuffer list = new StringBuffer(16); + StringBuffer list = new StringBuffer( 16 ); Iterator iter = vals.iterator(); - int i=0; + int i = 0; + boolean isJpaPositionalParam = parameterMetadata.getNamedParameterDescriptor( name ).isJpaStyle(); while ( iter.hasNext() ) { - String alias = name + i++ + '_'; - namedParamsCopy.put(alias, new TypedValue( type, iter.next(), session.getEntityMode() ) ); + String alias = ( isJpaPositionalParam ? 'x' + name : name ) + i++ + '_'; + namedParamsCopy.put( alias, new TypedValue( type, iter.next(), session.getEntityMode() ) ); list.append( ParserHelper.HQL_VARIABLE_PREFIX ).append( alias ); - if ( iter.hasNext() ) list.append(", "); + if ( iter.hasNext() ) { + list.append( ", " ); + } } - return StringHelper.replace( query, ParserHelper.HQL_VARIABLE_PREFIX + name, list.toString(), true ); + String paramPrefix = isJpaPositionalParam ? "?" 
: ParserHelper.HQL_VARIABLE_PREFIX; + return StringHelper.replace( query, paramPrefix + name, list.toString(), true ); } public Query setParameterList(String name, Collection vals) throws HibernateException { Modified: trunk/Hibernate3/src/org/hibernate/loader/custom/sql/SQLQueryParser.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/loader/custom/sql/SQLQueryParser.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/src/org/hibernate/loader/custom/sql/SQLQueryParser.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -246,7 +246,7 @@ result.append( '?' ); } - public void ejb3PositionalParameter(String name, int position) { + public void jpaPositionalParameter(String name, int position) { namedParameter( name, position ); } Modified: trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-06-28 10:02:24 UTC (rev 10066) +++ trunk/Hibernate3/test/org/hibernate/test/hql/ASTParserLoadingTest.java 2006-06-28 13:24:59 UTC (rev 10067) @@ -35,13 +35,10 @@ import org.hibernate.test.any.StringPropertyValue; import org.hibernate.test.any.IntegerPropertyValue; import org.hibernate.test.any.PropertySet; -import org.hibernate.test.legacy.Bar; -import org.hibernate.test.legacy.One; import org.hibernate.test.cid.Customer; import org.hibernate.test.cid.LineItem; import org.hibernate.test.cid.Order; import org.hibernate.test.cid.Product; -import org.hibernate.transform.AliasToBeanResultTransformer; import org.hibernate.transform.DistinctRootEntityResultTransformer; import org.hibernate.transform.Transformers; import org.hibernate.type.ManyToOneType; @@ -67,8 +64,7 @@ private List createdAnimalIds = new ArrayList(); public static Test suite() { - TestSuite suite = new TestSuite( ASTParserLoadingTest.class ); - return suite; + return new TestSuite( ASTParserLoadingTest.class ); } protected String[] getMappings() { @@ -93,6 +89,19 @@ } + public void testJPAPositionalParameterList() { + Session s = openSession(); + s.beginTransaction(); + ArrayList params = new ArrayList(); + params.add( "Doe" ); + params.add( "Public" ); + s.createQuery( "from Human where name.last in (?1)" ) + .setParameterList( "1", params ) + .list(); + s.getTransaction().commit(); + s.close(); + } + public void testComponentQueries() { Session s = openSession(); s.beginTransaction(); |
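For users, HHH-1552 means a JPA-style positional parameter such as ?1 can now be bound as a parameter list as well; the new ASTParserLoadingTest case boils down to the following fragment (session handling as in the test fixture):

    List params = new ArrayList();
    params.add( "Doe" );
    params.add( "Public" );
    // the JPA-style ordinal is addressed by its number, supplied as the parameter name
    List humans = s.createQuery( "from Human where name.last in (?1)" )
            .setParameterList( "1", params )
            .list();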
From: <hib...@li...> - 2006-06-28 10:02:28
Author: epbernard Date: 2006-06-28 06:02:24 -0400 (Wed, 28 Jun 2006) New Revision: 10066 Modified: trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/manytomany/Employer.java Log: ANN-362 handle backticks in table name Modified: trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/manytomany/Employer.java =================================================================== --- trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/manytomany/Employer.java 2006-06-28 10:01:48 UTC (rev 10065) +++ trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/manytomany/Employer.java 2006-06-28 10:02:24 UTC (rev 10066) @@ -11,6 +11,7 @@ import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.OrderBy; +import javax.persistence.Table; import org.hibernate.annotations.Cascade; @@ -20,6 +21,7 @@ * @author Emmanuel Bernard */ @Entity() +@Table(name="`Employer`") public class Employer implements Serializable { private Integer id; private Collection employees; |
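For context on ANN-362: backticks in a mapped name are Hibernate's database-neutral way of requesting a quoted identifier, so the test entity above now pushes a quoted table name through the annotation binder. Roughly, with the default Dialect quote characters (the exact quoting is dialect-specific):

    @Entity
    @Table(name = "`Employer`")          // backticks ask Hibernate to quote the identifier
    public class Employer { /* ... */ }

    // schema export then emits something like
    //   create table "Employer" ( ... )
    // rather than
    //   create table Employer ( ... )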
From: <hib...@li...> - 2006-06-28 10:01:52
Author: epbernard Date: 2006-06-28 06:01:48 -0400 (Wed, 28 Jun 2006) New Revision: 10065 Modified: trunk/Hibernate3/src/org/hibernate/cfg/Mappings.java Log: ANN-362 handle backticks in table name Modified: trunk/Hibernate3/src/org/hibernate/cfg/Mappings.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/cfg/Mappings.java 2006-06-28 09:59:05 UTC (rev 10064) +++ trunk/Hibernate3/src/org/hibernate/cfg/Mappings.java 2006-06-28 10:01:48 UTC (rev 10065) @@ -517,7 +517,7 @@ } public String getLogicalTableName(Table table) { - return getLogicalTableName( table.getSchema(), table.getCatalog(), table.getName() ); + return getLogicalTableName( table.getQuotedSchema(), table.getCatalog(), table.getQuotedName() ); } static public class ColumnNames implements Serializable { |
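The reason getLogicalTableName() has to go through the quoted accessors is that org.hibernate.mapping.Table strips the backticks on setName() and only remembers the quoting as a flag; getName() therefore no longer matches the key the binder registered the logical name under. A minimal sketch of that behaviour, assuming Table's usual backtick handling in this codebase:

    Table table = new Table();
    table.setName( "`Employer`" );
    table.getName();         // "Employer"   -> misses the logical-name map entry
    table.getQuotedName();   // "`Employer`" -> matches the key registered at binding time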
From: <hib...@li...> - 2006-06-28 09:59:10
Author: epbernard Date: 2006-06-28 05:59:05 -0400 (Wed, 28 Jun 2006) New Revision: 10064 Modified: trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/query/QueryTest.java Log: fix unit test Modified: trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/query/QueryTest.java =================================================================== --- trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/query/QueryTest.java 2006-06-28 09:44:07 UTC (rev 10063) +++ trunk/HibernateExt/metadata/src/test/org/hibernate/test/annotations/query/QueryTest.java 2006-06-28 09:59:05 UTC (rev 10064) @@ -1,8 +1,6 @@ //$Id$ package org.hibernate.test.annotations.query; -import java.math.BigDecimal; -import java.math.BigInteger; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; @@ -195,7 +193,7 @@ List result = q.list(); assertEquals( 1, result.size() ); Object[] row = (Object[]) result.get( 0 ); - SpaceShip spaceShip = (SpaceShip) row[2]; + SpaceShip spaceShip = (SpaceShip) row[0]; assertEquals( ship.getModel(), spaceShip.getModel() ); assertNotNull( spaceShip.getDimensions() ); assertEquals( ship.getDimensions().getWidth(), spaceShip.getDimensions().getWidth() ); @@ -203,8 +201,8 @@ assertEquals( ship.getCaptain().getFirstname(), ship.getCaptain().getFirstname() ); assertEquals( ship.getCaptain().getLastname(), ship.getCaptain().getLastname() ); //FIXME vary depending on databases - assertEquals( 50d, ( (BigInteger) row[0] ).doubleValue() ); - assertEquals( 500d, ( (BigDecimal) row[1] ).doubleValue() ); + assertTrue( row[1].toString().startsWith( "50" ) ); + assertTrue( row[2].toString().startsWith( "500" ) ); s.delete( spaceShip.getCaptain() ); s.delete( spaceShip ); tx.commit(); |
From: <hib...@li...> - 2006-06-28 09:44:12
Author: epbernard Date: 2006-06-28 05:44:07 -0400 (Wed, 28 Jun 2006) New Revision: 10063 Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/CollectionBinder.java Log: Typo in error report Modified: trunk/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/CollectionBinder.java =================================================================== --- trunk/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/CollectionBinder.java 2006-06-28 05:33:05 UTC (rev 10062) +++ trunk/HibernateExt/metadata/src/java/org/hibernate/cfg/annotations/CollectionBinder.java 2006-06-28 09:44:07 UTC (rev 10063) @@ -489,7 +489,7 @@ if ( persistentClass != null && StringHelper.isNotEmpty( this.mappedBy ) ) { try { reversePropertyInJoin = 0 != persistentClass.getJoinNumber( - persistentClass.getProperty( this.mappedBy ) + persistentClass.getRecursiveProperty( this.mappedBy ) ); } catch (MappingException e) { @@ -499,7 +499,7 @@ .append( " in " ) .append( collection.getOwnerEntityName() ) .append( "." ) - .append( this.mappedBy ); + .append( property.getName() ); throw new AnnotationException( error.toString() ); } } |
From: <hib...@li...> - 2006-06-28 05:33:07
Author: ste...@jb... Date: 2006-06-28 01:33:05 -0400 (Wed, 28 Jun 2006) New Revision: 10062 Added: trunk/Hibernate3/test/org/hibernate/test/component/Employee.java Log: missed checkin Added: trunk/Hibernate3/test/org/hibernate/test/component/Employee.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/component/Employee.java 2006-06-28 05:20:51 UTC (rev 10061) +++ trunk/Hibernate3/test/org/hibernate/test/component/Employee.java 2006-06-28 05:33:05 UTC (rev 10062) @@ -0,0 +1,38 @@ +package org.hibernate.test.component; + +import java.util.Date; + +/** + * todo: describe Employee + * + * @author Steve Ebersole + */ +public class Employee { + private Long id; + private Person person; + private Date hireDate; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Person getPerson() { + return person; + } + + public void setPerson(Person person) { + this.person = person; + } + + public Date getHireDate() { + return hireDate; + } + + public void setHireDate(Date hireDate) { + this.hireDate = hireDate; + } +} |
From: <hib...@li...> - 2006-06-28 05:21:29
Author: ste...@jb... Date: 2006-06-28 01:20:51 -0400 (Wed, 28 Jun 2006) New Revision: 10061 Added: trunk/Hibernate3/src/org/hibernate/criterion/LikeExpression.java Modified: trunk/Hibernate3/src/org/hibernate/criterion/Example.java trunk/Hibernate3/test/org/hibernate/test/criteria/CriteriaQueryTest.java Log: HHH-1847 : added escape option for like operator in QBE (Scott Marlow) Modified: trunk/Hibernate3/src/org/hibernate/criterion/Example.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/criterion/Example.java 2006-06-28 02:53:39 UTC (rev 10060) +++ trunk/Hibernate3/src/org/hibernate/criterion/Example.java 2006-06-28 05:20:51 UTC (rev 10061) @@ -36,6 +36,7 @@ private final Set excludedProperties = new HashSet(); private PropertySelector selector; private boolean isLikeEnabled; + private Character escapeCharacter; private boolean isIgnoreCaseEnabled; private MatchMode matchMode; @@ -85,6 +86,14 @@ } /** + * Set escape character for "like" clause + */ + public Example setEscapeCharacter(Character escapeCharacter) { + this.escapeCharacter = escapeCharacter; + return this; + } + + /** * Set the property selector */ public Example setPropertySelector(PropertySelector selector) { @@ -300,8 +309,18 @@ Criterion crit; if ( propertyValue!=null ) { boolean isString = propertyValue instanceof String; - String op = isLikeEnabled && isString ? " like " : "="; - crit = new SimpleExpression( propertyName, propertyValue, op, isIgnoreCaseEnabled && isString ); + if ( isLikeEnabled && isString ) { + crit = new LikeExpression( + propertyName, + ( String ) propertyValue, + matchMode, + escapeCharacter, + isIgnoreCaseEnabled + ); + } + else { + crit = new SimpleExpression( propertyName, propertyValue, "=", isIgnoreCaseEnabled && isString ); + } } else { crit = new NullExpression(propertyName); Added: trunk/Hibernate3/src/org/hibernate/criterion/LikeExpression.java =================================================================== --- trunk/Hibernate3/src/org/hibernate/criterion/LikeExpression.java 2006-06-28 02:53:39 UTC (rev 10060) +++ trunk/Hibernate3/src/org/hibernate/criterion/LikeExpression.java 2006-06-28 05:20:51 UTC (rev 10061) @@ -0,0 +1,75 @@ +package org.hibernate.criterion; + +import org.hibernate.Criteria; +import org.hibernate.HibernateException; +import org.hibernate.dialect.Dialect; +import org.hibernate.engine.TypedValue; + +/** + * A criterion representing a "like" expression + * + * @author Scott Marlow + * @author Steve Ebersole + */ +public class LikeExpression implements Criterion { + private final String propertyName; + private final Object value; + private final Character escapeChar; + private final boolean ignoreCase; + + protected LikeExpression( + String propertyName, + String value, + Character escapeChar, + boolean ignoreCase) { + this.propertyName = propertyName; + this.value = value; + this.escapeChar = escapeChar; + this.ignoreCase = ignoreCase; + } + + protected LikeExpression( + String propertyName, + String value) { + this( propertyName, value, null, false ); + } + + protected LikeExpression( + String propertyName, + String value, + MatchMode matchMode) { + this( propertyName, matchMode.toMatchString( value ) ); + } + + protected LikeExpression( + String propertyName, + String value, + MatchMode matchMode, + Character escapeChar, + boolean ignoreCase) { + this( propertyName, matchMode.toMatchString( value ), escapeChar, ignoreCase ); + } + + public String toSqlString( + Criteria criteria, + CriteriaQuery 
criteriaQuery) throws HibernateException { + Dialect dialect = criteriaQuery.getFactory().getDialect(); + String[] columns = criteriaQuery.getColumnsUsingProjection( criteria, propertyName ); + if ( columns.length != 1 ) { + throw new HibernateException( "Like may only be used with single-column properties" ); + } + String lhs = ignoreCase + ? dialect.getLowercaseFunction() + '(' + columns[0] + ')' + : columns[0]; + return lhs + " like ?" + ( escapeChar == null ? "" : " escape \'" + escapeChar + "\'" ); + + } + + public TypedValue[] getTypedValues( + Criteria criteria, + CriteriaQuery criteriaQuery) throws HibernateException { + return new TypedValue[] { + criteriaQuery.getTypedValue( criteria, propertyName, value.toString().toLowerCase() ) + }; + } +} Modified: trunk/Hibernate3/test/org/hibernate/test/criteria/CriteriaQueryTest.java =================================================================== --- trunk/Hibernate3/test/org/hibernate/test/criteria/CriteriaQueryTest.java 2006-06-28 02:53:39 UTC (rev 10060) +++ trunk/Hibernate3/test/org/hibernate/test/criteria/CriteriaQueryTest.java 2006-06-28 05:20:51 UTC (rev 10061) @@ -24,10 +24,10 @@ import org.hibernate.criterion.Property; import org.hibernate.criterion.Restrictions; import org.hibernate.criterion.Subqueries; +import org.hibernate.criterion.Example; import org.hibernate.test.TestCase; import org.hibernate.test.hql.Animal; import org.hibernate.test.hql.Reptile; -import org.hibernate.transform.AliasToBeanResultTransformer; import org.hibernate.transform.Transformers; import org.hibernate.type.Type; import org.hibernate.util.SerializationHelper; @@ -40,7 +40,44 @@ public CriteriaQueryTest(String str) { super(str); } - + + public void testEscapeCharacter() { + Session session = openSession(); + Transaction t = session.beginTransaction(); + Course c1 = new Course(); + c1.setCourseCode( "course-1" ); + c1.setDescription( "%1" ); + Course c2 = new Course(); + c2.setCourseCode( "course-2" ); + c2.setDescription( "%2" ); + Course c3 = new Course(); + c3.setCourseCode( "course-3" ); + c3.setDescription( "control" ); + session.persist( c1 ); + session.persist( c2 ); + session.persist( c3 ); + session.flush(); + session.clear(); + + // finds all courses which have a description equal to '%1' + Course example = new Course(); + example.setDescription( "&%1" ); + List result = session.createCriteria( Course.class ) + .add( Example.create( example ).ignoreCase().enableLike().setEscapeCharacter( new Character( '&' ) ) ) + .list(); + assertEquals( 1, result.size() ); + // finds all courses which contain '%' as the first char in the description + example.setDescription( "&%%" ); + result = session.createCriteria( Course.class ) + .add( Example.create( example ).ignoreCase().enableLike().setEscapeCharacter( new Character( '&' ) ) ) + .list(); + assertEquals( 2, result.size() ); + + session.createQuery( "delete Course" ).executeUpdate(); + t.commit(); + session.close(); + } + public void testScrollCriteria() { Session session = openSession(); Transaction t = session.beginTransaction(); |
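Usage-wise, HHH-1847 lets a query-by-example treat wildcard characters in the example values literally by declaring an escape character; the new CriteriaQueryTest case reduces to this fragment, where '&' escapes the '%' so only descriptions equal to the literal "%1" match:

    Course example = new Course();
    example.setDescription( "&%1" );
    List result = session.createCriteria( Course.class )
            .add( Example.create( example )
                    .ignoreCase()
                    .enableLike()
                    .setEscapeCharacter( new Character( '&' ) ) )
            .list();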