You can subscribe to this list here.
2009 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
|
Sep
(27) |
Oct
(132) |
Nov
(94) |
Dec
(135) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
2010 |
Jan
(79) |
Feb
(137) |
Mar
(255) |
Apr
(132) |
May
(61) |
Jun
(203) |
Jul
(60) |
Aug
(161) |
Sep
(16) |
Oct
(21) |
Nov
(48) |
Dec
(37) |
2011 |
Jan
(17) |
Feb
(14) |
Mar
(21) |
Apr
(12) |
May
(5) |
Jun
(31) |
Jul
(16) |
Aug
(44) |
Sep
(23) |
Oct
(15) |
Nov
(74) |
Dec
(37) |
2012 |
Jan
(22) |
Feb
(20) |
Mar
(28) |
Apr
(15) |
May
(12) |
Jun
(6) |
Jul
(9) |
Aug
(29) |
Sep
(3) |
Oct
(14) |
Nov
(38) |
Dec
|
2013 |
Jan
(6) |
Feb
(3) |
Mar
(6) |
Apr
(2) |
May
|
Jun
|
Jul
(5) |
Aug
(3) |
Sep
|
Oct
|
Nov
|
Dec
|
2014 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
(1) |
Jul
|
Aug
|
Sep
|
Oct
|
Nov
|
Dec
|
From: <amy...@us...> - 2013-01-03 09:05:30
|
Revision: 2165 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2165&view=rev Author: amykrause Date: 2013-01-03 09:05:21 +0000 (Thu, 03 Jan 2013) Log Message: ----------- Test metadata service. Added Paths: ----------- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/AttributeService.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/StatisticsService.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMapping.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMappingService.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataService.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataServiceFactory.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataCollector.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceCardinalityStatistics.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDQPFederation.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDataDictionary.java 
ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceFactory.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceTableSchema.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/ServiceException.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/SimpleTableMapping.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataService.java ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataServiceFactory.java Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/AttributeService.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/AttributeService.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/AttributeService.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2012 Royal Observatory, University of Edinburgh, UK + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package uk.ac.roe.wfau.firethorn.ogsadai.metadata; + +import uk.org.ogsadai.dqp.lqp.Attribute; + +/** + * Interface for accessing Attribute(s). + * + */ +public interface AttributeService { + + /** + * Get an Iterable set of Attributes, based on source (table) name. + * + * @param source + * The source (table) alias. <br/> + * This should be the table alias used in SQL queries passed into + * OGSA-DAI, before the mapping from table alias to fully + * qualified resource table name. + * + * @return An Iterable set of Attribute(s) for the source (table). + * + */ + public Iterable<Attribute> getAttributes(String source); + + /** + * Get a specific Attribute, based on source (table) name and Attribute + * (column) name. + * + * @param source + * The source (table) alias. <br/> + * This should be the table alias used in SQL queries passed into + * OGSA-DAI, before the mapping from table alias to fully + * qualified resource table name. + * @param name + * The Attribute(column) name. + * + * @return The specified Attribute, or null if there is no match. + * + */ + public Attribute getAttribute(String source, String name); + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/StatisticsService.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/StatisticsService.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/StatisticsService.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2012 Royal Observatory, University of Edinburgh, UK + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package uk.ac.roe.wfau.firethorn.ogsadai.metadata; + +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; + +/** + * Interface for accessing AttributeStatistics. + * + */ +public interface StatisticsService +{ + /** + * Get the AttributeStatistics for a given Attribute. + * + * @param attribute + * The Attribute to get the corresponding AttributeStatistics + * for. + * @return The AttributeStatistics for the Attribute, or null if there is no + * corresponding AttributeStatistics. + * + */ + public AttributeStatistics getStatistics(Attribute attribute); + + /** + * Get the AttributeStatistics given the source (table) and attribute name. + * + * @param source + * The source (table) alias. <br/> + * This should be the table alias used in SQL queries passed into + * OGSA-DAI, before the mapping from table alias to fully + * qualified resource table name. + * @param name + * The Attribute(column) name. + * + * @return The AttributeStatistics for the corresponding Attribute, or null + * if there is no corresponding Attribute or AttributeStatistics. 
+ * + */ + public AttributeStatistics getStatistics(String source, String name); + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMapping.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMapping.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMapping.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2012 Royal Observatory, University of Edinburgh, UK + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package uk.ac.roe.wfau.firethorn.ogsadai.metadata; + +/** + * An interface for a mapping between table alias (source) and the fully + * qualified table name and target resource. + * + * + */ +public interface TableMapping +{ + /** + * Get the table alias. <br/> + * This is the table alias used in SQL queries passed into OGSA-DAI, before + * the mapping from table alias to fully qualified resource table name. + * + * @return The table alias. + * + */ + public String tableAlias(); + + /** + * Get the fully qualified table name (catalog.schema.table) in the target + * resource. + * + * @return The fully qualified table name. 
+ * + */ + public String tableName(); + + /** + * Get the target resource identifier. + * + * @return The target resource identifier. + * + */ + public String resourceIdent(); + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMappingService.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMappingService.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/ac/roe/wfau/firethorn/ogsadai/metadata/TableMappingService.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2012 Royal Observatory, University of Edinburgh, UK + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package uk.ac.roe.wfau.firethorn.ogsadai.metadata; + +/** + * Interface for accessing TableMapping(s). + * + * + */ +public interface TableMappingService { + /** + * Lookup a TableMapping based on a table alias (source). + * + * @param source + * The (source) table alias. <br/> + * This should be the table alias used in SQL queries passed into + * OGSA-DAI, before the mapping from table alias to fully + * qualified resource table name. 
+ * + * @return The corresponding TableMapping, or null if there is no match + * + */ + public TableMapping getTableMapping(String source); + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataService.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataService.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataService.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,201 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMapping; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import uk.org.ogsadai.common.msgs.DAILogger; +import uk.org.ogsadai.dqp.common.DataDictionary; +import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.PhysicalSchema; +import uk.org.ogsadai.dqp.common.RequestDetails; +import uk.org.ogsadai.dqp.common.TableSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.AttributeImpl; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityUtils; +import uk.org.ogsadai.dqp.lqp.cardinality.StatisticsPhysicalSchema; +import uk.org.ogsadai.dqp.lqp.exceptions.TableNotFoundException; +import uk.org.ogsadai.resource.dataresource.dqp.DQPFederation; + +public class CachedMetadataService +implements AttributeService, TableMappingService, StatisticsService +{ + + private static final DAILogger LOG = + 
DAILogger.getLogger(CachedMetadataService.class); + + private Map<String, TableMapping> mTableMappings = + new HashMap<String, TableMapping>(); + private Map<String, CardinalityStatistics> mStatistics = + new HashMap<String, CardinalityStatistics>(); + private Map<String, List<Attribute>> mAttributes = + new HashMap<String, List<Attribute>>(); + private DataDictionary mDataDictionary; + + public CachedMetadataService( + RequestDetails details, + DQPFederation federation, + Map<String, DataNode> dataNodes) + { + mDataDictionary = federation.getDataDictionary(details); + Map<DataNode, String> dataNodeIdentifiers = createDataNodeIdentifier(dataNodes); + for (TableSchema tableSchema : mDataDictionary.getTableSchemas()) + { + String tableName = tableSchema.getTableName(); + LOG.debug("Retrieving table schema for '" + tableName + "'"); + List<Attribute> attributes = tableSchema.getSchema().getAttributes(); + mAttributes.put(tableName, attributes); + LOG.debug("Attributes: " + attributes); + // choose the first data node that can access the table + if (tableSchema.getDataNodeTables().isEmpty()) + { + throw new RuntimeException( + "Table " + tableName + " has no data node."); + } + DataNode dataNode = + tableSchema.getDataNodeTables().get(0).getDataNode(); + String localName = null; + try + { + localName = mDataDictionary.getOriginalTableName( + tableName, dataNode); + } + catch (TableNotFoundException e) + { + // this shouldn't happen + throw new RuntimeException(e); + } + TableMapping tableMapping = + new SimpleTableMapping( + tableName, + localName, + dataNodeIdentifiers.get(dataNode)); + LOG.debug("Adding table mapping: " + tableMapping); + + mTableMappings.put(tableName, tableMapping); + initStatistics(tableSchema, attributes); + } + } + + @Override + public AttributeStatistics getStatistics(Attribute attribute) + { + CardinalityStatistics cardStats = mStatistics.get(attribute.getSource()); + if (cardStats == null) + { + LOG.debug("Table statistics not found: " + 
attribute.getSource()); + return null; + } + return cardStats.getStatistics(attribute); + } + + @Override + public AttributeStatistics getStatistics(String source, String name) + { + Attribute attribute = new AttributeImpl(name, source); + return getStatistics(attribute); + } + + @Override + public TableMapping getTableMapping(String source) + { + return mTableMappings.get(source); + } + + @Override + public Iterable<Attribute> getAttributes(String source) + { + return mAttributes.get(source); + } + + @Override + public Attribute getAttribute(String source, String name) + { + List<Attribute> attributes = mAttributes.get(source); + if (attributes != null) + { + for (Attribute attribute : attributes) + { + if (attribute.getName().equals(name)) + { + return attribute; + } + } + } + return null; + } + + private void initStatistics( + TableSchema tableSchema, + List<Attribute> attributes) + { + LOG.debug("Adding statistics for '" + tableSchema.getTableName() + "'."); + CardinalityStatistics cardStats; + // Go to physical data dictionary and get table scan statistics + PhysicalSchema physicalSchema = tableSchema.getPhysicalSchema(); + + if (physicalSchema == null) + { + // make up a schema + LOG.debug("Adding standard schema."); + cardStats = CardinalityUtils.makeStatisticsFromHeading( + attributes, 100000, 10000); + } + else if (!(physicalSchema instanceof StatisticsPhysicalSchema)) + { + long cardinality = physicalSchema.getCardinality(); + LOG.debug("Adding simple schema with cardinality=" + cardinality); + // Convert simple physical schema into statistics schema. 
+ cardStats = CardinalityUtils.makeStatisticsFromHeading( + attributes, + cardinality, + cardinality/10.0); + } + else + { + LOG.debug("Found statistics schema."); + StatisticsPhysicalSchema statsPhysicalSchema = + (StatisticsPhysicalSchema) physicalSchema; + + cardStats = statsPhysicalSchema.getCardinalityStatistics(); + } + mStatistics.put(tableSchema.getTableName(), cardStats); + } + + /** + * Creates an identifier for the data node. + * + * @param dataNode + * data node + * @return unique identifier for the data node + */ + private Map<DataNode, String> createDataNodeIdentifier( + Map<String, DataNode> dataNodes) + { + Map<DataNode, String> result = new HashMap<DataNode, String>(); + for (Entry<String, DataNode> entry : dataNodes.entrySet()) + { + result.put(entry.getValue(), entry.getKey()); + } + return result; + } + +// private List<Attribute> createLocalAttributes( +// String localTableName, List<Attribute> attributes) +// { +// List<Attribute> result = new ArrayList<Attribute>(attributes.size()); +// for (Attribute attr : attributes) +// { +// result.add(new AttributeImpl(attr.getName(), localTableName)); +// } +// return result; +// } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataServiceFactory.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataServiceFactory.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/CachedMetadataServiceFactory.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,69 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.HashMap; +import java.util.Map; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import 
uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.RequestDetails; +import uk.org.ogsadai.resource.dataresource.dqp.DQPFederation; + +/** + * Wrapper for the DQP table schema fetcher. + * + * @author The OGSA-DAI Project Team. + */ +public class CachedMetadataServiceFactory implements MetadataServiceFactory +{ + + private CachedMetadataService mService; + private DQPFederation mDQPFederation; + private Map<String, DataNode> mDataNodes = new HashMap<String, DataNode>(); + + @Override + public AttributeService getAttributeService(RequestDetails details) + { + initialise(details); + return mService; + } + + @Override + public TableMappingService getTableMappingService(RequestDetails details) + { + initialise(details); + return mService; + } + + @Override + public StatisticsService getStatisticsService(RequestDetails details) + { + initialise(details); + return mService; + } + + public synchronized void initialise(RequestDetails requestDetails) + { + if (mService == null) + { + mService = + new CachedMetadataService( + requestDetails, mDQPFederation, mDataNodes); + } + } + + public void setFederation(DQPFederation federation) + { + mDQPFederation = federation; + } + + public void setDataNodes(Map<String, DataNode> dataNodes) + { + mDataNodes.clear(); + mDataNodes.putAll(dataNodes); + } + + + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataCollector.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataCollector.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataCollector.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,187 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; + +import 
uk.org.ogsadai.dqp.lqp.AttributeImpl; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeHistogramBin; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeHistogramBinEndpoint; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeHistogramRange; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.HistogramBasedAttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.ScalarAttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.SimpleCardinalityStatistics; + +public class MetadataCollector +{ + public double getNumValues(Connection connection, String table, String column) + throws SQLException + { + ResultSet resultSet = + executeQuery(connection, + "SELECT count(*) FROM (SELECT DISTINCT " + column + + " FROM " + table + ") t"); + resultSet.next(); + double result = resultSet.getDouble(1); + resultSet.close(); + return result; + } + + public double getNumNulls(Connection connection, String table, String column) + throws SQLException + { + ResultSet resultSet = + executeQuery(connection, + "SELECT count(*) FROM " + table + + " WHERE " + column + " IS NULL"); + resultSet.next(); + double result = resultSet.getDouble(1); + resultSet.close(); + return result; + } + + public HistogramBasedAttributeStatistics createHistograms( + Connection connection, String table, String column, int numBins) + throws SQLException + { + HistogramBasedAttributeStatistics result = + new HistogramBasedAttributeStatistics(); + ResultSet resultSet = + executeQuery(connection, + "SELECT min(" + column + "), max(" + column + ") " + + "FROM " + table); + resultSet.next(); + double min = resultSet.getDouble(1); + double max = resultSet.getDouble(2); + resultSet.close(); + System.out.println("min=" + min + ", max=" + max); + double binSize = (max - min)/numBins; + for (int i=0; i<numBins-1; i++) + { + double lower = min + i*binSize; + double upper = min + 
(i+1)*binSize; + String query; + if (i == 0) + { + // include min + query = "SELECT count(*) FROM " + table + + " WHERE " + column + " >= " + lower + + " AND " + column + " <= " + upper; + } + else + { + query = "SELECT count(*) FROM " + table + + " WHERE " + column + " > " + lower + + " AND " + column + " <= " + upper; + } + resultSet = executeQuery(connection, query); + resultSet.next(); + double numRows = resultSet.getDouble(1); + resultSet.close(); + resultSet = executeQuery( + connection, + "SELECT count(*) FROM (SELECT DISTINCT " + + column + " FROM " + table + + " WHERE " + column + " > " + lower + + " AND " + column + " < " + upper +") t"); + resultSet.next(); + double numValues = resultSet.getDouble(1); + resultSet.close(); + AttributeHistogramRange range; + if (i == 0) + { + range = new AttributeHistogramRange( + new AttributeHistogramBinEndpoint(lower, true), + new AttributeHistogramBinEndpoint(upper, true)); + } + else + { + range = new AttributeHistogramRange( + new AttributeHistogramBinEndpoint(lower, false), + new AttributeHistogramBinEndpoint(upper, true)); + } + AttributeHistogramBin histogramBin = + new AttributeHistogramBin( + range, numRows, numValues); + result.addBin(histogramBin); + } + return result; + } + + public AttributeStatistics createScalarAttributeStatistics( + Connection connection, String table, String column, double numRows) + throws SQLException + { + double numValues = getNumValues(connection, table, column); + double numNulls = getNumNulls(connection, table, column); + return new ScalarAttributeStatistics(numRows, numValues, numNulls); + } + + /** + * Creates attribute statistics for all columns in the database table. 
+ * + * @param connection + * database connection + * @param table + * table name + * @param numBins + * number of bins to create for histogram statistics + * @return cardinality statistics for the table + * @throws SQLException + */ + public CardinalityStatistics createStatistics( + Connection connection, String table, int numBins) + throws SQLException + { + SimpleCardinalityStatistics result = new SimpleCardinalityStatistics(); + ResultSet resultSet = + executeQuery(connection, "SELECT count(*) FROM " + table); + resultSet.next(); + double numRows = resultSet.getDouble(1); + resultSet.close(); + System.out.println("total number of rows = " + numRows); + resultSet = connection.getMetaData().getColumns(null, null, table, null); + while (resultSet.next()) + { + String column = resultSet.getString(4); + System.out.println("Creating histograms for " + column); + int type = resultSet.getInt(5); + AttributeStatistics histograms = null; + switch (type) + { + // only numeric types + case Types.BIGINT: + case Types.DECIMAL: + case Types.DOUBLE: + case Types.FLOAT: + case Types.INTEGER: + case Types.NUMERIC: + case Types.REAL: + case Types.SMALLINT: + case Types.TINYINT: + histograms = createHistograms(connection, table, column, numBins); + break; + default: + histograms = createScalarAttributeStatistics(connection, table, column, numRows); + break; + } + if (histograms != null) + { + result.addAttributeStatistics( + new AttributeImpl(column, table), histograms); + } + } + return result; + } + + private ResultSet executeQuery(Connection connection, String query) + throws SQLException + { + Statement statement = connection.createStatement(); + return statement.executeQuery(query); + } +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceCardinalityStatistics.java =================================================================== --- 
ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceCardinalityStatistics.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceCardinalityStatistics.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,85 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.AttributeMatchMode; +import uk.org.ogsadai.dqp.lqp.AttributeUtils; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; + +/** + * An implementation of cardinality statistics for a set of attributes which + * retrieves the attribute statistics on request from a statistics service. + * + * @author The OGSA-DAI Project Team. + */ +public class MetadataServiceCardinalityStatistics implements CardinalityStatistics +{ + private StatisticsService mStatisticsService; + private List<Attribute> mAttributes; + + public MetadataServiceCardinalityStatistics( + List<Attribute> attributes, + StatisticsService statisticsService) + { + mStatisticsService = statisticsService; + mAttributes = attributes; + } + + @Override + public double getCardinality() + { + if (mAttributes.isEmpty()) + { + return 0; + } + else + { + Attribute attr = mAttributes.get(0); + AttributeStatistics stats = mStatisticsService.getStatistics(attr); + if (stats == null) + { + throw new RuntimeException("No statistics found for " + attr); + } + return stats.getNumRows(); + } + } + + @Override + public boolean contains(Attribute attr) + { + return AttributeUtils.containsMatching( + attr, mAttributes, AttributeMatchMode.NAME_AND_NULL_SOURCE); + } + + @Override + public AttributeStatistics getStatistics(Attribute attr) + { + return mStatisticsService.getStatistics(attr); + } + 
+ @Override + public Map<Attribute, AttributeStatistics> getStatistics() + { + Map<Attribute, AttributeStatistics> result = + new HashMap<Attribute, AttributeStatistics>(); + for (Attribute attr : mAttributes) + { + result.put(attr, getStatistics(attr)); + } + return result; + } + + @Override + public String toString() + { + return "MetadataServiceCardinalityStatistics[attributes=" + + mAttributes + + ", service=" + mStatisticsService + "]"; + } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDQPFederation.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDQPFederation.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDQPFederation.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,138 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import uk.org.ogsadai.context.OGSADAIContext; +import uk.org.ogsadai.dqp.common.DataDictionary; +import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.EvaluationNode; +import uk.org.ogsadai.dqp.common.RequestDetails; +import uk.org.ogsadai.dqp.lqp.udf.FunctionRepository; +import uk.org.ogsadai.dqp.presentation.common.DQPResourceConfigurationException; +import uk.org.ogsadai.resource.dataresource.dqp.DQPFederation; + +public class MetadataServiceDQPFederation implements DQPFederation +{ + + private Set<EvaluationNode> mEvaluationNodes = new HashSet<EvaluationNode>(); + private Map<String, DataNode> mDataNodes = new HashMap<String, DataNode>(); + private EvaluationNode mLocalNode; + private FunctionRepository mFunctionRepository; + private MetadataServiceFactory mMetadataServiceFactory; + + @Override + public Set<EvaluationNode> getEvaluationNodes() + { + return 
mEvaluationNodes; + } + + @Override + public Set<DataNode> getDataNodes() + { + return new HashSet<DataNode>(mDataNodes.values()); + } + + @Override + public EvaluationNode getLocalNode() + { + return mLocalNode; + } + + @Override + public DataDictionary getDataDictionary(RequestDetails requestDetails) + { + MetadataServiceDataDictionary dataDictionary = + new MetadataServiceDataDictionary(); + dataDictionary.setFederation(this); + dataDictionary.setRequestDetails(requestDetails); + dataDictionary.setTableMappingService( + mMetadataServiceFactory.getTableMappingService(requestDetails)); + dataDictionary.setAttributeService( + mMetadataServiceFactory.getAttributeService(requestDetails)); + dataDictionary.setStatisticsService( + mMetadataServiceFactory.getStatisticsService(requestDetails)); + return dataDictionary; + } + + @Override + public void refreshDataDictionary(RequestDetails requestDetails) + { + // nothing to do - data dictionary is being looked up for each query + } + + protected FunctionRepository getFunctionRepository() + { + if (mFunctionRepository == null) + { + mFunctionRepository = + (FunctionRepository) OGSADAIContext.getInstance().get( + FunctionRepository.FUNCTION_REPOSITORY_KEY); + + } + return mFunctionRepository; + } + + protected Map<String, DataNode> getDataNodesMap() + { + return mDataNodes; + } + + public void setMetadataServiceFactory(MetadataServiceFactory factory) + { + mMetadataServiceFactory = factory; + } + + /** + * Sets the function repository for the federation. If this value is not set + * then the function repository is looked up from the OGSA-DAI context. + * + * @param functionRepository + * function repository + */ + public void setFunctionRepository(FunctionRepository functionRepository) + { + mFunctionRepository = functionRepository; + } + + /** + * Specifies the data nodes in this federation. Data nodes are identified + * by names that the table mapping service uses. 
+ * + * @param dataNodes + * data node mapping + */ + public void setDataNodesMap(Map<String, DataNode> dataNodes) + { + mDataNodes.clear(); + mDataNodes.putAll(dataNodes); + } + + /** + * Specifies the evaluation nodes in this federation. + * + * @param evaluationNodes + * evaluation nodes + */ + public void setEvaluationNodes(Set<EvaluationNode> evaluationNodes) + { + mEvaluationNodes.clear(); + mEvaluationNodes.addAll(evaluationNodes); + for (EvaluationNode node : mEvaluationNodes) + { + if (node.isLocal()) + { + mLocalNode = node; + } + } + if (mLocalNode == null) + { + throw new DQPResourceConfigurationException( + new IllegalArgumentException( + "At least one local evaluation node is needed.")); + } + } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDataDictionary.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDataDictionary.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceDataDictionary.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,178 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMapping; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import uk.org.ogsadai.dqp.common.DataDictionary; +import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.RequestDetails; +import uk.org.ogsadai.dqp.common.TableSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.Heading; +import uk.org.ogsadai.dqp.lqp.HeadingImpl; +import uk.org.ogsadai.dqp.lqp.exceptions.AttributeNotFoundException; 
+import uk.org.ogsadai.dqp.lqp.exceptions.TableNotFoundException; +import uk.org.ogsadai.dqp.lqp.udf.FunctionRepository; + +public class MetadataServiceDataDictionary implements DataDictionary +{ + /** The DQP federation. */ + private MetadataServiceDQPFederation mFederation; + /** Request details of the client. Not used at the moment. */ + private RequestDetails mRequestDetails; + private TableMappingService mTableMappingService; + private AttributeService mAttributeService; + private StatisticsService mStatisticsService; + + @Override + public Heading getHeading(String tableName) throws TableNotFoundException + { + return new HeadingImpl(getAttributes(tableName)); + } + + @Override + public TableSchema getTableSchema(String tableName) + throws TableNotFoundException + { + TableMapping tableMapping = + mTableMappingService.getTableMapping(tableName); + if (tableMapping == null) + { + throw new TableNotFoundException(tableName); + } + MetadataServiceTableSchema tableSchema = + new MetadataServiceTableSchema( + tableName, + tableMapping.tableName(), + mFederation.getDataNodesMap().get( + tableMapping.resourceIdent())); + tableSchema.setDataDictionary(this); + return tableSchema; +// List<Attribute> attributes = getAttributes(tableName); +// CardinalityStatistics cardStats = +// new MetadataServiceCardinalityStatistics( +// attributes, mStatisticsService); +// SimpleStatisticsPhysicalSchema physicalSchema = +// new SimpleStatisticsPhysicalSchema(tableName, 0); +// physicalSchema.setCardinalityStatistics(cardStats); +// return new SimpleTableSchema( +// tableMapping.tableName(), +// mDataNodes.get(tableMapping.resourceIdent()), +// new SimpleLogicalSchema(tableName, attributes), +// physicalSchema); + } + + public List<Attribute> getAttributes(String tableName) + { + List<Attribute> attributes = new LinkedList<Attribute>(); + Iterable<Attribute> iter = mAttributeService.getAttributes(tableName); + for (Attribute attribute : iter) + { + attributes.add(attribute); + } + 
return attributes; + } + + @Override + public String getOriginalTableName(String table, DataNode dataNode) + throws TableNotFoundException + { + TableMapping mapping = mTableMappingService.getTableMapping(table); + if (mapping == null) + { + throw new TableNotFoundException(table); + } + return mapping.tableName(); + } + + @Override + public Attribute getAttribute(Attribute attribute) + throws AttributeNotFoundException + { + Attribute result = mAttributeService.getAttribute( + attribute.getSource(), attribute.getName()); + if (result == null) + { + throw new AttributeNotFoundException(attribute); + } + return result; + } + + @Override + public Set<TableSchema> getTableSchemas() + { + // cannot retrieve the names of all available tables? + throw new UnsupportedOperationException(); + } + + @Override + public FunctionRepository getFunctionRepository() + { + return mFederation.getFunctionRepository(); + } + + /** + * Returns the statistics service. + * + * @return statistics service + */ + public StatisticsService getStatisticsService() + { + return mStatisticsService; + } + + public void setRequestDetails(RequestDetails requestDetails) + { + mRequestDetails = requestDetails; + } + + + public void setFederation(MetadataServiceDQPFederation federation) + { + mFederation = federation; + } + + /** + * Sets the table mapping service which maps a table name to its data node + * identifier and a local table name. + * + * @param service + * table mapping service + */ + public void setTableMappingService(TableMappingService service) + { + mTableMappingService = service; + } + + /** + * Set the attribute service that provides attribute metadata to the data + * dictionary. + * + * @param service + * attribute service + */ + public void setAttributeService(AttributeService service) + { + mAttributeService = service; + } + + /** + * Set the attribute statistics service that provides attribute statistics + * to the data dictionary. 
+ * + * @param service + * statistics service + */ + public void setStatisticsService(StatisticsService service) + { + mStatisticsService = service; + } + + + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceFactory.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceFactory.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceFactory.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,42 @@ +package uk.org.ogsadai.dqp.firethorn; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import uk.org.ogsadai.dqp.common.RequestDetails; + +/** + * A factory class that creates metadata services for a particular request. + * + * @author The OGSA-DAI Project Team. + */ +public interface MetadataServiceFactory +{ + /** + * Returns an attribute service. + * + * @param details + * request details + * @return attribute service for the request + */ + AttributeService getAttributeService(RequestDetails details); + + /** + * Returns a table mapping service. + * + * @param details + * request details + * @return table mapping service for the request + */ + TableMappingService getTableMappingService(RequestDetails details); + + /** + * Returns a statistics service. 
+ * + * @param details + * request details + * @return statistics service for the request + */ + StatisticsService getStatisticsService(RequestDetails details); + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceTableSchema.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceTableSchema.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/MetadataServiceTableSchema.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,114 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.DataNodeTable; +import uk.org.ogsadai.dqp.common.LogicalSchema; +import uk.org.ogsadai.dqp.common.PhysicalSchema; +import uk.org.ogsadai.dqp.common.TableSchema; +import uk.org.ogsadai.dqp.common.simple.SimpleDataNodeTable; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.SimpleStatisticsPhysicalSchema; + +/** + * A table schema that retrieves logical and physical table metadata on demand. + * As it is assumed that the table schema is requested once per query the + * physical and logical schema are cached for the lifetime of this object (ie + * the duration of the query) to avoid querying the metadata service every time, + * for example, when an optimiser requests cardinality estimation, or a table + * appears twice in a query. + * + * @author The OGSA-DAI Project Team. + */ +public class MetadataServiceTableSchema implements TableSchema +{ + /** Name of the table in the federation. 
*/ + private String mTableName; + /** List of data node tables that can access the table. */ + private List<DataNodeTable> mDataNodeTables; + /** The data dictionary which provides access to the metadata services. */ + private MetadataServiceDataDictionary mDataDictionary; + + /** + * Cached copies of the logical and physical schema. If <code>null</code> + * the schemas have not been retrieved from the metadata services yet. + */ + private LogicalSchema mLogicalSchema; + private PhysicalSchema mPhysicalSchema; + + public MetadataServiceTableSchema( + String tableName, + String localName, + DataNode dataNode) + { + mTableName = tableName; + mDataNodeTables = Arrays.<DataNodeTable>asList( + new SimpleDataNodeTable(dataNode, localName)); + } + + public void setDataDictionary(MetadataServiceDataDictionary dataDictionary) + { + mDataDictionary = dataDictionary; + } + + @Override + public List<DataNodeTable> getDataNodeTables() + { + return Collections.unmodifiableList(mDataNodeTables); + } + + @Override + public DataNodeTable getDataNodeTable(DataNode dataNode) + { + for (DataNodeTable dnTable : mDataNodeTables) + { + if (dnTable.getDataNode().equals(dataNode)) + { + return dnTable; + } + } + return null; + } + + @Override + public String getTableName() + { + return mTableName; + } + + @Override + public synchronized LogicalSchema getSchema() + { + if (mLogicalSchema == null) + { + List<Attribute> attributes = + mDataDictionary.getAttributes(mTableName); + mLogicalSchema = new SimpleLogicalSchema(mTableName, attributes); + } + return mLogicalSchema; + } + + @Override + public synchronized PhysicalSchema getPhysicalSchema() + { + if (mPhysicalSchema == null) + { + List<Attribute> attributes = + mDataDictionary.getAttributes(mTableName); + CardinalityStatistics cardStats = + new MetadataServiceCardinalityStatistics( + attributes, mDataDictionary.getStatisticsService()); + SimpleStatisticsPhysicalSchema physicalSchema = + new SimpleStatisticsPhysicalSchema(mTableName, 0); 
+ physicalSchema.setCardinalityStatistics(cardStats); + mPhysicalSchema = physicalSchema; + } + return mPhysicalSchema; + } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/ServiceException.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/ServiceException.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/ServiceException.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,12 @@ +package uk.org.ogsadai.dqp.firethorn; + + +public class ServiceException extends RuntimeException +{ + + public ServiceException(String message, Throwable cause) + { + super(message, cause); + } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/SimpleTableMapping.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/SimpleTableMapping.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/SimpleTableMapping.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,44 @@ +package uk.org.ogsadai.dqp.firethorn; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMapping; + +public class SimpleTableMapping implements TableMapping +{ + + private String mAlias; + private String mLocalName; + private String mResource; + + public SimpleTableMapping( + String alias, String localName, String resourceIdentifier) + { + mAlias = alias; + mLocalName = localName; + mResource = resourceIdentifier; + } + + @Override + public String tableAlias() + { + return mAlias; + } + + @Override + public String tableName() + { + return mLocalName; + } + + @Override + public String resourceIdent() + { + return mResource; + } + + @Override + public String toString() + { + return "TableMapping(alias=" + mAlias + ", local name=" + + 
mLocalName + ", resource=" + mResource + ")"; + } +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataService.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataService.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataService.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,79 @@ +package uk.org.ogsadai.dqp.firethorn; + +import java.util.HashMap; +import java.util.Map; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMapping; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; + +public class TestMetadataService +implements AttributeService, StatisticsService, TableMappingService +{ + + private Map<String, TableMapping> mTableMappings; + private Map<String, Map<String, Attribute>> mTables; + private Map<String, Map<String, AttributeStatistics>> mStatistics; + + @Override + public AttributeStatistics getStatistics(Attribute attribute) + { + return getStatistics(attribute.getSource(), attribute.getName()); + } + + @Override + public AttributeStatistics getStatistics(String source, String name) + { + return mStatistics.get(source).get(name); + } + + @Override + public Iterable<Attribute> getAttributes(String source) + { + return mTables.get(source).values(); + } + + @Override + public Attribute getAttribute(String source, String name) + { + return mTables.get(source).get(name); + } + + @Override + public TableMapping getTableMapping(String source) + { + return mTableMappings.get(source); + } + + public void add(Attribute attribute) + { + Map<String, Attribute> table = 
mTables.get(attribute.getSource()); + if (table == null) + { + table = new HashMap<String, Attribute>(); + mTables.put(attribute.getSource(), table); + } + table.put(attribute.getName(), attribute); + } + + public void add(Attribute attribute, AttributeStatistics attrStats) + { + Map<String, AttributeStatistics> table = + mStatistics.get(attribute.getSource()); + if (table == null) + { + table = new HashMap<String, AttributeStatistics>(); + mStatistics.put(attribute.getSource(), table); + } + table.put(attribute.getName(), attrStats); + } + + public void add(TableMapping mapping) + { + mTableMappings.put(mapping.tableName(), mapping); + } + +} Added: ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataServiceFactory.java =================================================================== --- ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataServiceFactory.java (rev 0) +++ ogsa-dai/trunk/extensions/astro/server/src/main/java/uk/org/ogsadai/dqp/firethorn/TestMetadataServiceFactory.java 2013-01-03 09:05:21 UTC (rev 2165) @@ -0,0 +1,36 @@ +package uk.org.ogsadai.dqp.firethorn; + +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.AttributeService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.StatisticsService; +import uk.ac.roe.wfau.firethorn.ogsadai.metadata.TableMappingService; +import uk.org.ogsadai.dqp.common.RequestDetails; + +public class TestMetadataServiceFactory implements MetadataServiceFactory +{ + + private TestMetadataService mTestMetadataService; + + public TestMetadataServiceFactory() + { + mTestMetadataService = new TestMetadataService(); + } + + @Override + public AttributeService getAttributeService(RequestDetails details) + { + return mTestMetadataService; + } + + @Override + public TableMappingService getTableMappingService(RequestDetails details) + { + return mTestMetadataService; + } + + @Override + public StatisticsService getStatisticsService(RequestDetails 
details) + { + return mTestMetadataService; + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-16 11:04:22
|
Revision: 2164 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2164&view=rev Author: amykrause Date: 2012-11-16 11:04:12 +0000 (Fri, 16 Nov 2012) Log Message: ----------- Checked in by mistake. Removed Paths: ------------- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-16 11:01:42
|
Revision: 2163 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2163&view=rev Author: amykrause Date: 2012-11-16 11:01:34 +0000 (Fri, 16 Nov 2012) Log Message: ----------- Tidying up. Added Paths: ----------- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/ ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/FileStatisticsPhysicalSchemaFetcherTest.java ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/SQLServerHistogramStatisticsSchemaFactoryTest.java Removed Paths: ------------- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/StatisticsCardinalityEstimatingOperatorVisitorTest.java Deleted: ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/StatisticsCardinalityEstimatingOperatorVisitorTest.java =================================================================== --- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/StatisticsCardinalityEstimatingOperatorVisitorTest.java 2012-11-16 10:30:46 UTC (rev 2162) +++ ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/StatisticsCardinalityEstimatingOperatorVisitorTest.java 2012-11-16 11:01:34 UTC (rev 2163) @@ -1,1168 +0,0 @@ -package uk.org.ogsadai.dqp.lqp.cardinality; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -import junit.framework.TestCase; - -import org.easymock.MockControl; - -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; -import uk.org.ogsadai.dqp.common.DataDictionary; -import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; -import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; -import uk.org.ogsadai.dqp.lqp.Attribute; -import uk.org.ogsadai.dqp.lqp.AttributeImpl; -import uk.org.ogsadai.dqp.lqp.CommonPredicate; -import 
uk.org.ogsadai.dqp.lqp.operators.DifferenceOperator; -import uk.org.ogsadai.dqp.lqp.operators.GroupByOperator; -import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; -import uk.org.ogsadai.dqp.lqp.operators.ProductOperator; -import uk.org.ogsadai.dqp.lqp.operators.SelectOperator; -import uk.org.ogsadai.dqp.lqp.operators.SimpleSelectProjectJoinTableScanQuery; -import uk.org.ogsadai.dqp.lqp.operators.TableScanOperator; -import uk.org.ogsadai.dqp.presentation.common.SimpleDataNode; - -public class StatisticsCardinalityEstimatingOperatorVisitorTest extends TestCase -{ - public void testVisitTableScan() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable"); - - MockControl cardinalityStatisticsControl = - MockControl.createControl(CardinalityStatistics.class); - CardinalityStatistics cardinalityStatistics = - (CardinalityStatistics) cardinalityStatisticsControl.getMock(); - cardinalityStatisticsControl.expectAndReturn( - cardinalityStatistics.getCardinality(), - 1234); - - MockControl statsPhysicalSchemaControl = - MockControl.createControl(StatisticsPhysicalSchema.class); - StatisticsPhysicalSchema physicalSchema = - (StatisticsPhysicalSchema) statsPhysicalSchemaControl.getMock(); - - statsPhysicalSchemaControl.expectAndReturn( - physicalSchema.getCardinalityStatistics(), - cardinalityStatistics); - - cardinalityStatisticsControl.replay(); - statsPhysicalSchemaControl.replay(); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new 
SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - visitor.visit(tableScan); - - assertEquals( - "tableScan should have stats annotation", - cardinalityStatistics, - tableScan.getAnnotation("CardinalityStatistics")); - - assertEquals( - 1234, - tableScan.getResultCardinality()); - } - - public void testVisitTableScanAndAttrEqConstSelect() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable"); - - SimpleCardinalityStatistics cardinalityStatistics = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 20, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("x", "alias_myTable"), histogram); - - histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 100, false, 200, 180)); - histogram.addBin( - new AttributeHistogramBin(100, true, 200, false, 400, 390)); - histogram.addBin( - new AttributeHistogramBin(200, true, 300, false, 200, 190)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("y", "alias_myTable"), histogram); - - MockControl statsPhysicalSchemaControl = - MockControl.createControl(StatisticsPhysicalSchema.class); - StatisticsPhysicalSchema physicalSchema = - (StatisticsPhysicalSchema) statsPhysicalSchemaControl.getMock(); - - statsPhysicalSchemaControl.expectAndReturn( - physicalSchema.getCardinalityStatistics(), - 
cardinalityStatistics); - - statsPhysicalSchemaControl.replay(); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - SelectOperator select = - new SelectOperator(new CommonPredicate("x = 30", null)); - select.setChild(0, tableScan); - - visitor.visit(tableScan); - visitor.visit(select); - - assertEquals( - "tableScan should have stats annotation", - cardinalityStatistics, - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - select.getAnnotation("CardinalityStatistics")); - - System.out.println(select.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 10, - select.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - select.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(10, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = 
xHistogram.getBins(); - assertEquals(1, xBins.size()); - AttributeHistogramBin xBin = - (AttributeHistogramBin) xBins.get(0); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - assertEquals(30.0, min.getPoint()); - assertEquals(30.0, max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(true, max.getIsInclusive()); - assertEquals(1, xBin.getNumValues()); - assertEquals(10, xBin.getNumRows()); - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - assertEquals(10, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - double[] expectedMinValues = { 0, 100, 200 }; - double[] expectedMaxValues = { 100, 200, 300 }; - - for (int i=0; i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - min = yBin.getRange().getMin(); - max = yBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.4 && bin1ToBin0Ratio < 2.6 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.4 && bin1ToBin2Ratio < 2.6 ); - - // Then lots more things - arithmetic constants e.g. 
0-1.0 - - // Put in as daft defaults as before for stuff we don't do - } - - public void testVisitTableScanAndAttrNotEqConstSelect() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable"); - - SimpleCardinalityStatistics cardinalityStatistics = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 20, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("x", "alias_myTable"), histogram); - - histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 100, false, 200, 180)); - histogram.addBin( - new AttributeHistogramBin(100, true, 200, false, 400, 390)); - histogram.addBin( - new AttributeHistogramBin(200, true, 300, false, 200, 190)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("y", "alias_myTable"), histogram); - - MockControl statsPhysicalSchemaControl = - MockControl.createControl(StatisticsPhysicalSchema.class); - StatisticsPhysicalSchema physicalSchema = - (StatisticsPhysicalSchema) statsPhysicalSchemaControl.getMock(); - - statsPhysicalSchemaControl.expectAndReturn( - physicalSchema.getCardinalityStatistics(), - cardinalityStatistics); - - statsPhysicalSchemaControl.replay(); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new 
StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - SelectOperator select = - new SelectOperator(new CommonPredicate("NOT (x = 30)", null)); - select.setChild(0, tableScan); - - visitor.visit(tableScan); - visitor.visit(select); - - assertEquals( - "tableScan should have stats annotation", - cardinalityStatistics, - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - select.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 790, - select.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - select.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(790, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(4, xBins.size()); - double[] expectedMinValues = { 0, 20, 40, 60 }; - double[] expectedMaxValues = { 20, 40, 60, 80 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin xBin = - (AttributeHistogramBin) xBins.get(i); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - 
assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(xBin.getNumValues() <= xBin.getNumRows()); - } - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - assertEquals(790, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - expectedMinValues = new double[] { 0, 100, 200 }; - expectedMaxValues = new double[] { 100, 200, 300 }; - - for (int i=0; i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - AttributeHistogramBinEndpoint min = yBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = yBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.4 && bin1ToBin0Ratio < 2.6 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.4 && bin1ToBin2Ratio < 2.6 ); - } - - public void testVisitTableScanAndAttrLtConstSelect() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable"); - - SimpleCardinalityStatistics cardinalityStatistics = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 20, false, 50, 20)); - histogram.addBin( - new 
AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("x", "alias_myTable"), histogram); - - histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 100, false, 200, 180)); - histogram.addBin( - new AttributeHistogramBin(100, true, 200, false, 400, 390)); - histogram.addBin( - new AttributeHistogramBin(200, true, 300, false, 200, 190)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("y", "alias_myTable"), histogram); - - MockControl statsPhysicalSchemaControl = - MockControl.createControl(StatisticsPhysicalSchema.class); - StatisticsPhysicalSchema physicalSchema = - (StatisticsPhysicalSchema) statsPhysicalSchemaControl.getMock(); - - statsPhysicalSchemaControl.expectAndReturn( - physicalSchema.getCardinalityStatistics(), - cardinalityStatistics); - - statsPhysicalSchemaControl.replay(); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - SelectOperator select = - new SelectOperator(new CommonPredicate("x < 30", null)); - select.setChild(0, tableScan); - - visitor.visit(tableScan); - visitor.visit(select); - - assertNotNull( - "tableScan should have stats 
annotation", - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - select.getAnnotation("CardinalityStatistics")); - - System.out.println(select.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 150, - select.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - select.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(150, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(2, xBins.size()); - - double[] expectedMinValues = { 0, 20 }; - double[] expectedMaxValues = { 20, 30 }; - long[] expectedNumRows = { 50, 100 }; - long[] expectedNumValues = { 20, 10 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin bin = - (AttributeHistogramBin) xBins.get(i); - AttributeHistogramBinEndpoint min = bin.getRange().getMin(); - AttributeHistogramBinEndpoint max = bin.getRange().getMax(); - assertEquals(expectedNumRows[i],bin.getNumRows()); - assertEquals(expectedNumValues[i],bin.getNumValues()); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(bin.getNumValues() <= bin.getNumRows()); - } - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - 
assertEquals(150, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - expectedMinValues = new double[]{ 0, 100, 200 }; - expectedMaxValues = new double[]{ 100, 200, 300 }; - - for (int i=0; i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - AttributeHistogramBinEndpoint min = yBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = yBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.9 && bin1ToBin0Ratio < 2.1 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.9 && bin1ToBin2Ratio < 2.1 ); - } - - public void testVisitTableScanAndSelectOrExpression() throws Exception - { - DataDictionary dataDictionary = createDataDictionary(); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - tableScan.setDataDictionary(dataDictionary); - - SelectOperator select = - new SelectOperator(new CommonPredicate("x < 30 OR x > 60", null)); - select.setChild(0, tableScan); - - visitor.visit(tableScan); - visitor.visit(select); - - assertNotNull( - "tableScan should have stats annotation", - 
tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - select.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 400, - select.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - select.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(400, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(3, xBins.size()); - double[] expectedMinValues = { 0, 20, 60 }; - double[] expectedMaxValues = { 20, 30, 80 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin xBin = - (AttributeHistogramBin) xBins.get(i); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(xBin.getNumValues() <= xBin.getNumRows()); - } - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - assertEquals(400, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - expectedMinValues = new double[] { 0, 100, 200 }; - expectedMaxValues = new double[] { 100, 200, 300 }; - - for (int i=0; 
i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - AttributeHistogramBinEndpoint min = yBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = yBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.9 && bin1ToBin0Ratio < 2.1 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.9 && bin1ToBin2Ratio < 2.1 ); - } - - public void testVisitTableScanAndKeyAttrEqConstSelect() - { - fail("KNOWN: Test not yet written"); - } - - public void testVisitTableScanAndProduct() - { - DataDictionary dataDictionary = createDataDictionary(); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - ProductOperator product = new ProductOperator(); - product.setChild(0, tableScan); - product.setChild(1, tableScan); - - visitor.visit(tableScan); - visitor.visit(product); - - assertNotNull( - "tableScan should have stats annotation", - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - product.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 640000, - 
product.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - product.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(640000, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(4, xBins.size()); - double[] expectedMinValues = { 0, 20, 40, 60 }; - double[] expectedMaxValues = { 20, 40, 60, 80 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin xBin = - (AttributeHistogramBin) xBins.get(i); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(xBin.getNumValues() <= xBin.getNumRows()); - } - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - assertEquals(640000, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - expectedMinValues = new double[] { 0, 100, 200 }; - expectedMaxValues = new double[] { 100, 200, 300 }; - - for (int i=0; i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - AttributeHistogramBinEndpoint min = yBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = yBin.getRange().getMax(); - 
assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.9 && bin1ToBin0Ratio < 2.1 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.9 && bin1ToBin2Ratio < 2.1 ); - } - - public void testVisitTableScanAndJoin() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable1"); - - SimpleCardinalityStatistics cardinalityStatistics1 = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 20, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics1.addAttributeStatistics( - new AttributeImpl("x1", null), histogram); - - SimpleStatisticsPhysicalSchema physicalSchema = - new SimpleStatisticsPhysicalSchema("myTable1", 800); - physicalSchema.setCardinalityStatistics(cardinalityStatistics1); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - TableMetaDataImpl tableMetaData2 = - new TableMetaDataImpl("cat", "schema", "myTable2"); - - SimpleCardinalityStatistics cardinalityStatistics2 = - new SimpleCardinalityStatistics(); - histogram = 
new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(80, true, 100, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(100, true, 120, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(120, true, 140, false, 200, 20)); - cardinalityStatistics2.addAttributeStatistics( - new AttributeImpl("x2", null), histogram); - - physicalSchema = new SimpleStatisticsPhysicalSchema("myTable2", 800); - physicalSchema.setCardinalityStatistics(cardinalityStatistics2); - - SimpleTableSchema dataSchema2 = new SimpleTableSchema( - "cat", dataNode, tableMetaData2, physicalSchema); - dataDictionary.add(dataSchema2); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable1"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - - tableScanQuery = new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable2"); - TableScanOperator tableScan2 = new TableScanOperator(tableScanQuery); - - InnerThetaJoinOperator join = - new InnerThetaJoinOperator( - new CommonPredicate("x1 = x2", null)); - join.setChild(0, tableScan); - join.setChild(1, tableScan2); - - visitor.visit(tableScan); - visitor.visit(tableScan2); - visitor.visit(join); - - assertNotNull( - "tableScan should have stats annotation", - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - join.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 1000, - 
join.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - join.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x1", null))); - assertTrue(histograms.containsKey(new AttributeImpl("x2", null))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x1", null)); - assertEquals(1000, xHistogram.getNumRows()); - List<AttributeHistogramBin> x1Bins = xHistogram.getBins(); - assertEquals(1, x1Bins.size()); - AttributeHistogramBin x1Bin = - (AttributeHistogramBin) x1Bins.get(0); - AttributeHistogramBinEndpoint min = x1Bin.getRange().getMin(); - AttributeHistogramBinEndpoint max = x1Bin.getRange().getMax(); - assertEquals(60.0, min.getPoint()); - assertEquals(80.0, max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(x1Bin.getNumValues() <= x1Bin.getNumRows()); - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x2", null)); - assertEquals(1000, yHistogram.getNumRows()); - List<AttributeHistogramBin> x2Bins = yHistogram.getBins(); - assertEquals(1, x2Bins.size()); - - AttributeHistogramBin x2Bin = - (AttributeHistogramBin) x2Bins.get(0); - min = x2Bin.getRange().getMin(); - max = x2Bin.getRange().getMax(); - assertEquals(60.0, min.getPoint()); - assertEquals(80.0, max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(x2Bin.getNumValues() <= x2Bin.getNumRows()); - } - - public void testVisitTableScanAndGroupBy() throws Exception - { - DataDictionary dataDictionary = createDataDictionary(); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new 
StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable"); - TableScanOperator tableScan = new TableScanOperator(tableScanQuery); - tableScan.setDataDictionary(dataDictionary); - - SimpleGroupByOperator groupBy = new SimpleGroupByOperator(); - groupBy.setGroupingAttributes(Arrays.<Attribute>asList(new AttributeImpl("x"))); - groupBy.setChild(0, tableScan); - - visitor.visit(tableScan); - visitor.visit(groupBy); - - assertNotNull( - "tableScan should have stats annotation", - tableScan.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - groupBy.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan.getResultCardinality()); - assertEquals( - 80, - groupBy.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - groupBy.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(2, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", "alias_myTable"))); - assertTrue(histograms.containsKey(new AttributeImpl("y", "alias_myTable"))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", "alias_myTable")); - assertEquals(80, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(4, xBins.size()); - double[] expectedMinValues = { 0, 20, 40, 60 }; - double[] expectedMaxValues = { 20, 40, 60, 80 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin xBin = - (AttributeHistogramBin) xBins.get(i); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - 
AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(xBin.getNumValues() == xBin.getNumRows()); - } - - HistogramBasedAttributeStatistics yHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("y", "alias_myTable")); - assertEquals(80, yHistogram.getNumRows()); - List<AttributeHistogramBin> yBins = yHistogram.getBins(); - assertEquals(3, yBins.size()); - - expectedMinValues = new double[] { 0, 100, 200 }; - expectedMaxValues = new double[] { 100, 200, 300 }; - - for (int i=0; i<yBins.size(); ++i) - { - AttributeHistogramBin yBin = - (AttributeHistogramBin) yBins.get(i); - AttributeHistogramBinEndpoint min = yBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = yBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(yBin.getNumValues() <= yBin.getNumRows()); - } - - double bin1ToBin0Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(0).getNumRows(); - assertTrue( bin1ToBin0Ratio > 1.9 && bin1ToBin0Ratio < 2.1 ); - double bin1ToBin2Ratio = - (double) yBins.get(1).getNumRows() / - (double) yBins.get(2).getNumRows(); - assertTrue( bin1ToBin2Ratio > 1.9 && bin1ToBin2Ratio < 2.1 ); - } - - public void testVisitTableScanAndDifference() throws Exception - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable1"); - - SimpleCardinalityStatistics cardinalityStatistics1 = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new 
AttributeHistogramBin(0, true, 20, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics1.addAttributeStatistics( - new AttributeImpl("x", null), histogram); - - SimpleStatisticsPhysicalSchema physicalSchema = - new SimpleStatisticsPhysicalSchema("myTable1", 800); - physicalSchema.setCardinalityStatistics(cardinalityStatistics1); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - dataDictionary.add(dataSchema); - - TableMetaDataImpl tableMetaData2 = - new TableMetaDataImpl("cat", "schema", "myTable2"); - - SimpleCardinalityStatistics cardinalityStatistics2 = - new SimpleCardinalityStatistics(); - histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(80, true, 100, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(100, true, 120, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(120, true, 140, false, 200, 20)); - cardinalityStatistics2.addAttributeStatistics( - new AttributeImpl("x", null), histogram); - - physicalSchema = new SimpleStatisticsPhysicalSchema("myTable2", 800); - physicalSchema.setCardinalityStatistics(cardinalityStatistics2); - - SimpleTableSchema dataSchema2 = new SimpleTableSchema( - "cat", dataNode, tableMetaData2, physicalSchema); - dataDictionary.add(dataSchema2); - - StatisticsCardinalityEstimatingOperatorVisitor visitor = - new StatisticsCardinalityEstimatingOperatorVisitor(); - visitor.setDataDictionary(dataDictionary); - - SimpleSelectProjectJoinTableScanQuery tableScanQuery = - new SimpleSelectProjectJoinTableScanQuery(); 
- tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable1"); - TableScanOperator tableScan1 = new TableScanOperator(tableScanQuery); - - tableScanQuery = new SimpleSelectProjectJoinTableScanQuery(); - tableScanQuery.setDataDictionary(dataDictionary); - tableScanQuery.setTableName("alias_myTable2"); - TableScanOperator tableScan2 = new TableScanOperator(tableScanQuery); - - DifferenceOperator difference = new DifferenceOperator(false); - difference.setChild(0, tableScan1); - difference.setChild(1, tableScan2); - - visitor.visit(tableScan1); - visitor.visit(tableScan2); - visitor.visit(difference); - - assertNotNull( - "tableScan1 should have stats annotation", - tableScan1.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "tableScan2 should have stats annotation", - tableScan2.getAnnotation("CardinalityStatistics")); - - assertNotNull( - "select should have stats annotation", - difference.getAnnotation("CardinalityStatistics")); - - assertEquals( - 800, - tableScan1.getResultCardinality()); - assertEquals( - 800, - tableScan2.getResultCardinality()); - assertEquals( - 700, - difference.getResultCardinality()); - - SimpleCardinalityStatistics selectCardStats = - (SimpleCardinalityStatistics) - difference.getAnnotation("CardinalityStatistics"); - - Map<Attribute, AttributeStatistics> histograms = - selectCardStats.getStatistics(); - - assertEquals(1, histograms.size()); - assertTrue(histograms.containsKey(new AttributeImpl("x", null))); - - HistogramBasedAttributeStatistics xHistogram = - (HistogramBasedAttributeStatistics) - histograms.get(new AttributeImpl("x", null)); - assertEquals(700, xHistogram.getNumRows()); - List<AttributeHistogramBin> xBins = xHistogram.getBins(); - assertEquals(4, xBins.size()); - double[] expectedMinValues = { 0, 20, 40, 60 }; - double[] expectedMaxValues = { 20, 40, 60, 80 }; - - for (int i=0; i<xBins.size(); ++i) - { - AttributeHistogramBin xBin = - (AttributeHistogramBin) 
xBins.get(i); - AttributeHistogramBinEndpoint min = xBin.getRange().getMin(); - AttributeHistogramBinEndpoint max = xBin.getRange().getMax(); - assertEquals(expectedMinValues[i], min.getPoint()); - assertEquals(expectedMaxValues[i], max.getPoint()); - assertEquals(true, min.getIsInclusive()); - assertEquals(false, max.getIsInclusive()); - assertTrue(xBin.getNumValues() <= xBin.getNumRows()); - } - } - - private DataDictionary createDataDictionary() - { - SimpleDataDictionary dataDictionary = new SimpleDataDictionary(); - TableMetaDataImpl tableMetaData = - new TableMetaDataImpl("cat", "schema", "myTable"); - - SimpleCardinalityStatistics cardinalityStatistics = - new SimpleCardinalityStatistics(); - HistogramBasedAttributeStatistics histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 20, false, 100, 20)); - histogram.addBin( - new AttributeHistogramBin(20, true, 40, false, 200, 20)); - histogram.addBin( - new AttributeHistogramBin(40, true, 60, false, 300, 20)); - histogram.addBin( - new AttributeHistogramBin(60, true, 80, false, 200, 20)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("x", "alias_myTable"), histogram); - - histogram = new HistogramBasedAttributeStatistics(); - histogram.addBin( - new AttributeHistogramBin(0, true, 100, false, 200, 180)); - histogram.addBin( - new AttributeHistogramBin(100, true, 200, false, 400, 390)); - histogram.addBin( - new AttributeHistogramBin(200, true, 300, false, 200, 190)); - cardinalityStatistics.addAttributeStatistics( - new AttributeImpl("y", "alias_myTable"), histogram); - SimpleStatisticsPhysicalSchema physicalSchema = - new SimpleStatisticsPhysicalSchema("myTable", 800); - physicalSchema.setCardinalityStatistics(cardinalityStatistics); - - SimpleDataNode dataNode = new SimpleDataNode( - "resourceID", "alias", null); - - SimpleTableSchema dataSchema = new SimpleTableSchema( - "cat", dataNode, tableMetaData, physicalSchema); - 
dataDictionary.add(dataSchema); - - return dataDictionary; - } - - static class SimpleGroupByOperator extends GroupByOperator - { - public void setGroupingAttributes(List<Attribute> groupingAttributes) - { - mGroupingAttributes = groupingAttributes; - } - } - -} Added: ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/FileStatisticsPhysicalSchemaFetcherTest.java =================================================================== --- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/FileStatisticsPhysicalSchemaFetcherTest.java (rev 0) +++ ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/FileStatisticsPhysicalSchemaFetcherTest.java 2012-11-16 11:01:34 UTC (rev 2163) @@ -0,0 +1,94 @@ +package uk.org.ogsadai.dqp.lqp.cardinality.schema; + +import java.sql.Types; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import junit.framework.TestCase; +import uk.org.ogsadai.converters.databaseschema.ColumnMetaData; +import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; +import uk.org.ogsadai.converters.databaseschema.TableMetaData; +import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; +import uk.org.ogsadai.dqp.common.PhysicalSchema; +import uk.org.ogsadai.dqp.lqp.AttributeImpl; +import uk.org.ogsadai.dqp.lqp.CommonPredicate; +import uk.org.ogsadai.dqp.lqp.Predicate; +import uk.org.ogsadai.dqp.lqp.cardinality.ArithmeticOperator; +import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; +import uk.org.ogsadai.dqp.lqp.cardinality.StatisticsPhysicalSchema; +import uk.org.ogsadai.expression.ArithmeticExpressionOperand; +import uk.org.ogsadai.expression.EqualExpression; +import uk.org.ogsadai.expression.arithmetic.TableColumn; +import uk.org.ogsadai.tuple.TupleTypes; + +public class 
FileStatisticsPhysicalSchemaFetcherTest extends TestCase +{ + + public void testSimple() throws Exception + { + FileStatisticsPhysicalSchemaFetcher fetcher = new FileStatisticsPhysicalSchemaFetcher(); + Properties properties = new Properties(); + properties.setProperty("UKIDSS,lasSource", "/Users/akrause/astronomy/UKIDSS-stats/"); + fetcher.setDirectories(properties); + TableMetaDataImpl metadata = new TableMetaDataImpl("", "", "lasSource"); + ColumnMetaData[] columns = new ColumnMetaData[7]; + ColumnMetaDataImpl col = new ColumnMetaDataImpl("dec", 1, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[0] = col; + col = new ColumnMetaDataImpl("ra", 2, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[1] = col; + col = new ColumnMetaDataImpl("kAperMag3", 3, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[2] = col; + col = new ColumnMetaDataImpl("hAperMag3", 4, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[3] = col; + col = new ColumnMetaDataImpl("kClass", 5, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[4] = col; + col = new ColumnMetaDataImpl("hClass", 6, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[5] = col; + col = new ColumnMetaDataImpl("hppErrBits", 7, metadata); + col.setTupleType(TupleTypes._DOUBLE); + col.setDataType(Types.DOUBLE); + columns[6] = col; + metadata.setColumns(columns); + fetcher.setTableMetadata(Arrays.<TableMetaData>asList(metadata)); + List<PhysicalSchema> physSchemas = fetcher.fetchPhysicalSchema(null, null); + assertEquals(1, physSchemas.size()); + PhysicalSchema schema = physSchemas.get(0); + assertTrue(schema instanceof StatisticsPhysicalSchema); + CardinalityStatistics stats = + ((StatisticsPhysicalSchema)schema).getCardinalityStatistics(); + AttributeStatistics 
attrStats = + stats.getStatistics(new AttributeImpl("ra", "UKIDSS_lasSource")); + System.out.println(attrStats); + System.out.println(attrStats.getNumRows()); + attrStats.processOperatorConstant( + ArithmeticOperator.LESS_THAN, 150.0); + System.out.println(attrStats.getNumRows()); + attrStats.processOperatorConstant( + ArithmeticOperator.GREATER_THAN_OR_EQUAL, 130.0); + System.out.println(attrStats.getNumRows()); + System.out.println(stats.getCardinality()); + stats = stats.processSelect( + new CommonPredicate( + new EqualExpression( + new ArithmeticExpressionOperand( + new TableColumn("hClass", "UKIDSS_lasSource")), + new ArithmeticExpressionOperand( + new TableColumn("kClass", "UKIDSS_lasSource"))))); + System.out.println(stats.getCardinality()); + } + +} Added: ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/SQLServerHistogramStatisticsSchemaFactoryTest.java =================================================================== --- ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/SQLServerHistogramStatisticsSchemaFactoryTest.java (rev 0) +++ ogsa-dai/trunk/extensions/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/cardinality/schema/SQLServerHistogramStatisticsSchemaFactoryTest.java 2012-11-16 11:01:34 UTC (rev 2163) @@ -0,0 +1,24 @@ +package uk.org.ogsadai.dqp.lqp.cardinality.schema; + +import java.io.File; + +import uk.org.ogsadai.dqp.lqp.AttributeImpl; +import uk.org.ogsadai.dqp.lqp.cardinality.HistogramBasedAttributeStatistics; +import uk.org.ogsadai.tuple.TupleTypes; + +import junit.framework.TestCase; + +public class SQLServerHistogramStatisticsSchemaFactoryTest extends TestCase +{ + + public void testReadDec() throws Exception + { + File histogram = new File("/Users/akrause/astronomy/UKIDSS-stats/lasSource-dec-stats.csv"); + HistogramBasedAttributeStatistics statistics = + SQLServerHistogramStatisticsSchemaFactory.createStatisticsForTable( + new 
AttributeImpl("x", TupleTypes._DOUBLE, "lasSource_dec"), + histogram); + System.out.println(statistics); + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-16 10:30:53
|
Revision: 2162 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2162&view=rev Author: amykrause Date: 2012-11-16 10:30:46 +0000 (Fri, 16 Nov 2012) Log Message: ----------- Tidying up. Modified Paths: -------------- ogsa-dai/trunk/server-tests/presentation/core/src/test/ext/java/uk/org/ogsadai/test/server/DataSinkServiceTest.java ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java Modified: ogsa-dai/trunk/server-tests/presentation/core/src/test/ext/java/uk/org/ogsadai/test/server/DataSinkServiceTest.java =================================================================== --- ogsa-dai/trunk/server-tests/presentation/core/src/test/ext/java/uk/org/ogsadai/test/server/DataSinkServiceTest.java 2012-11-16 09:48:24 UTC (rev 2161) +++ ogsa-dai/trunk/server-tests/presentation/core/src/test/ext/java/uk/org/ogsadai/test/server/DataSinkServiceTest.java 2012-11-16 10:30:46 UTC (rev 2162) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2007-2010. +// Copyright (c) The University of Edinburgh, 2007-2012. // // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,7 +19,6 @@ import java.util.Date; import junit.framework.TestCase; - import uk.org.ogsadai.client.toolkit.DataRequestExecutionResource; import uk.org.ogsadai.client.toolkit.DataSinkResource; import uk.org.ogsadai.client.toolkit.DataValueIterator; @@ -52,8 +51,6 @@ import uk.org.ogsadai.resource.ResourceID; import uk.org.ogsadai.resource.request.RequestExecutionStatus; import uk.org.ogsadai.resource.request.RequestStatus; -import uk.org.ogsadai.test.server.ServerTestProperties; -import uk.org.ogsadai.test.server.TestServerProxyFactory; /** * Data sink-related server tests. This class expects @@ -88,7 +85,7 @@ { /** Copyright notice. 
*/ private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2007-2010."; + "Copyright (c) The University of Edinburgh, 2007-2012."; /** Instance of Date object for testing purposes. */ private static Date mInstanceOfDate = new Date(); @@ -328,9 +325,11 @@ catch (RequestExecutionException e) { Message[] errors = deliverToDataSink.getErrorMessages(); - assertTrue(errors.length==1); - assertEquals(ErrorID.SERVER_ERROR_WITH_HOST, - errors[0].getID()); + assertTrue(errors.length==3); + assertEquals(ErrorID.GENERAL_ACTIVITY_USER_EXCEPTION, + errors[0].getID()); + assertEquals(ErrorID.SERVER_COMMS_ERROR, + errors[1].getID()); requestResource = e.getRequestResource(); } status = requestResource.getRequestStatus(); Modified: ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java =================================================================== --- ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java 2012-11-16 09:48:24 UTC (rev 2161) +++ ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java 2012-11-16 10:30:46 UTC (rev 2162) @@ -1580,208 +1580,207 @@ public void testNotInLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM mysql_types WHERE smallintT 
< 90)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + listCompare(expected, actual); + } } - } public void testNotInRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; - - ResultSet expected = executeQueryDirect(sqlDirect); - ResultSet actual = execute(sqlDQP); - - setCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; + + ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + + setCompare(expected, actual); + } } - } public void testCorrInLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT IN (SELECT smallintT FROM mysql_types " - + "WHERE t.tinyintT = smallintT and smallintT > 80)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " - + "WHERE tinyintT IN (SELECT s.smallintT FROM MySQLResource_mysql_types s " - + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE 
tinyintT IN (SELECT smallintT FROM mysql_types " + + "WHERE t.tinyintT = smallintT and smallintT > 80)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT IN (SELECT s.smallintT FROM MySQLResource_mysql_types s " + + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testCorrInRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT IN (SELECT smallintT FROM mysql_types " - + "WHERE t.tinyintT = smallintT and smallintT > 80)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types t " - + "WHERE tinyintT IN (SELECT smallintT FROM MySQLResource_mysql_types s " - + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT IN (SELECT smallintT FROM mysql_types " + + "WHERE t.tinyintT = smallintT and smallintT > 80)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types t " + + "WHERE tinyintT IN (SELECT smallintT FROM MySQLResource_mysql_types s " + + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testCorrNotInLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = 
s.smallintT AND s.smallintT < 90)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.smallintT AND s.smallintT < 90)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testCorrNotInRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM MySQLResource_mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM MySQLResource_mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testAnyLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT > ANY 
(SELECT smallintT " - + "FROM mysql_types WHERE smallintT > 80)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " - + "WHERE tinyintT > ANY (SELECT smallintT " - + "FROM MySQLResource_mysql_types s WHERE s.smallintT > 80)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM mysql_types WHERE smallintT > 80)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM MySQLResource_mysql_types s WHERE s.smallintT > 80)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testAnyRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT > ANY (SELECT smallintT " - + "FROM mysql_types WHERE smallintT > 80)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT > ANY (SELECT smallintT " - + "FROM MySQLResource_mysql_types WHERE smallintT > 80)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM mysql_types WHERE smallintT > 80)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM MySQLResource_mysql_types WHERE smallintT > 80)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testAllLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT tinyintT, smallintT FROM mysql_types 
" - + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; - - String sqlDQP = "SELECT tinyintT, smallintT FROM MySQLResource_mysql_types " - + "WHERE tinyintT < ALL (SELECT s.smallintT FROM MySQLResource_mysql_types s)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT tinyintT, smallintT FROM mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; + + String sqlDQP = "SELECT tinyintT, smallintT FROM MySQLResource_mysql_types " + + "WHERE tinyintT < ALL (SELECT s.smallintT FROM MySQLResource_mysql_types s)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } - } public void testAllRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT < ALL (SELECT smallintT FROM MySQLResource_mysql_types)"; - - ResultSet expected = executeQueryDirect(sqlDirect); - ResultSet actual = execute(sqlDQP); - - setCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM MySQLResource_mysql_types)"; + + ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + + setCompare(expected, actual); + } } - } public void testCorrAnyLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT < ANY (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; - - String sqlDQP = "SELECT * FROM 
MySQLResource_mysql_types t " - + "WHERE tinyintT < ANY (SELECT s.smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - setCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT < ANY (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT < ANY (SELECT s.smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + setCompare(expected, actual); + } } - } public void testCorrAnyRemote() throws Exception { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-16 09:48:35
|
Revision: 2161 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2161&view=rev Author: amykrause Date: 2012-11-16 09:48:24 +0000 (Fri, 16 Nov 2012) Log Message: ----------- Jenkins config for reference. Added Paths: ----------- test-framework/trunk/jenkins/dqp-server-tests/config.xml Added: test-framework/trunk/jenkins/dqp-server-tests/config.xml =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/config.xml (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/config.xml 2012-11-16 09:48:24 UTC (rev 2161) @@ -0,0 +1,89 @@ +<?xml version='1.0' encoding='UTF-8'?> +<project> + <actions/> + <description></description> + <logRotator> + <daysToKeep>30</daysToKeep> + <numToKeep>-1</numToKeep> + <artifactDaysToKeep>-1</artifactDaysToKeep> + <artifactNumToKeep>-1</artifactNumToKeep> + </logRotator> + <keepDependencies>false</keepDependencies> + <properties/> + <scm class="hudson.scm.SubversionSCM"> + <locations> + <hudson.scm.SubversionSCM_-ModuleLocation> + <remote>https://ogsa-dai.svn.sourceforge.net/svnroot/ogsa-dai/ogsa-dai/trunk</remote> + <local>ogsa-dai/trunk</local> + </hudson.scm.SubversionSCM_-ModuleLocation> + <hudson.scm.SubversionSCM_-ModuleLocation> + <remote>https://ogsa-dai.svn.sourceforge.net/svnroot/ogsa-dai/third-party</remote> + <local>third-party</local> + </hudson.scm.SubversionSCM_-ModuleLocation> + <hudson.scm.SubversionSCM_-ModuleLocation> + <remote>https://ogsa-dai.svn.sourceforge.net/svnroot/ogsa-dai/test-framework/trunk/jenkins</remote> + <local>ogsa-dai/test-framework/trunk/jenkins</local> + </hudson.scm.SubversionSCM_-ModuleLocation> + <hudson.scm.SubversionSCM_-ModuleLocation> + <remote>https://ogsa-dai.svn.sourceforge.net/svnroot/ogsa-dai/sandbox</remote> + <local>sandbox</local> + </hudson.scm.SubversionSCM_-ModuleLocation> + </locations> + <excludedRegions></excludedRegions> + <includedRegions></includedRegions> + <excludedUsers></excludedUsers> + 
<excludedRevprop></excludedRevprop> + <excludedCommitMessages></excludedCommitMessages> + <workspaceUpdater class="hudson.scm.subversion.UpdateUpdater"/> + </scm> + <canRoam>true</canRoam> + <disabled>false</disabled> + <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding> + <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding> + <triggers class="vector"> + <hudson.triggers.TimerTrigger> + <spec>@midnight</spec> + </hudson.triggers.TimerTrigger> + </triggers> + <concurrentBuild>false</concurrentBuild> + <builders> + <hudson.tasks.Ant> + <targets></targets> + <buildFile>ogsa-dai/trunk/release-scripts/ogsa-dai/jersey/build.xml</buildFile> + <properties>env.OGSA_DAI_DEPENDENCIES=$WORKSPACE/third-party/dependencies +env.OGSA_DAI_PACKAGES=/localdisk/home/ogsadaitest/third-party/packages</properties> + </hudson.tasks.Ant> + <hudson.tasks.Ant> + <targets>buildModules</targets> + <buildFile>ogsa-dai/test-framework/trunk/jenkins/dqp-server-tests/build.xml</buildFile> + <properties>env.OGSA_DAI_DEPENDENCIES=$WORKSPACE/third-party/dependencies +env.OGSA_DAI_PACKAGES=/localdisk/home/ogsadaitest/third-party/packages</properties> + </hudson.tasks.Ant> + <hudson.tasks.Ant> + <targets>installAll</targets> + <buildFile>ogsa-dai/test-framework/trunk/jenkins/dqp-server-tests/build.xml</buildFile> + <properties>env.OGSA_DAI_DEPENDENCIES=$WORKSPACE/third-party/dependencies +env.OGSA_DAI_PACKAGES=/localdisk/home/ogsadaitest/third-party/packages</properties> + </hudson.tasks.Ant> + <hudson.tasks.Ant> + <targets>runTests</targets> + <buildFile>ogsa-dai/test-framework/trunk/jenkins/dqp-server-tests/build.xml</buildFile> + <properties>env.OGSA_DAI_DEPENDENCIES=$WORKSPACE/third-party/dependencies +env.OGSA_DAI_PACKAGES=/localdisk/home/ogsadaitest/third-party/packages</properties> + </hudson.tasks.Ant> + <hudson.tasks.Ant> + <targets>stopAll</targets> + <buildFile>ogsa-dai/test-framework/trunk/jenkins/dqp-server-tests/build.xml</buildFile> + 
<properties>env.OGSA_DAI_DEPENDENCIES=$WORKSPACE/ogsa-dai/third-party/dependencies +env.OGSA_DAI_PACKAGES=/localdisk/home/ogsadaitest/third-party/packages</properties> + </hudson.tasks.Ant> + </builders> + <publishers> + <hudson.tasks.junit.JUnitResultArchiver> + <testResults>ogsa-dai/trunk/server-tests/**/TEST-*.xml</testResults> + <keepLongStdio>false</keepLongStdio> + <testDataPublishers/> + </hudson.tasks.junit.JUnitResultArchiver> + </publishers> + <buildWrappers/> +</project> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-15 15:48:20
|
Revision: 2160 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2160&view=rev Author: amykrause Date: 2012-11-15 15:48:10 +0000 (Thu, 15 Nov 2012) Log Message: ----------- More details. Modified Paths: -------------- test-framework/trunk/jenkins/dqp-server-tests/README.txt Modified: test-framework/trunk/jenkins/dqp-server-tests/README.txt =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/README.txt 2012-11-15 14:37:48 UTC (rev 2159) +++ test-framework/trunk/jenkins/dqp-server-tests/README.txt 2012-11-15 15:48:10 UTC (rev 2160) @@ -4,7 +4,17 @@ 1. Set up MySQL, create database 'dqp_test' and load the DQP test data from ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/main/data/dqp_test.sql.zip for example: - mysql -u yourusername -p yourpasswd dqp_test < dqp_test.sql + + $ mysql -u yourusername -p yourpasswd + + mysql> create database dqp_test; + Query OK, 1 row affected (0.04 sec) + + mysql> quit; + Bye + + $ mysql -u yourusername -p yourpasswd dqp_test < dqp_test.sql + 2. Create a new job on Jenkins. (NOTE: The project name must not contain spaces) 3. Copy config.xml to the the job directory. 4. Point browser to the Jenkins webapp and reload configuration from disk. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-15 14:38:00
|
Revision: 2159 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2159&view=rev Author: amykrause Date: 2012-11-15 14:37:48 +0000 (Thu, 15 Nov 2012) Log Message: ----------- DQP system tests. Added Paths: ----------- test-framework/trunk/jenkins/dqp-server-tests/ test-framework/trunk/jenkins/dqp-server-tests/DQPCompilerConfiguration.xml test-framework/trunk/jenkins/dqp-server-tests/README.txt test-framework/trunk/jenkins/dqp-server-tests/ant.properties test-framework/trunk/jenkins/dqp-server-tests/build.xml test-framework/trunk/jenkins/dqp-server-tests/dqp.server.test.properties test-framework/trunk/jenkins/dqp-server-tests/dqpServer.configure test-framework/trunk/jenkins/dqp-server-tests/ogsadaiOne.configure test-framework/trunk/jenkins/dqp-server-tests/ogsadaiTwo.configure Added: test-framework/trunk/jenkins/dqp-server-tests/DQPCompilerConfiguration.xml =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/DQPCompilerConfiguration.xml (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/DQPCompilerConfiguration.xml 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,252 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<beans xmlns="http://www.springframework.org/schema/beans" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns:context="http://www.springframework.org/schema/context" + xsi:schemaLocation="http://www.springframework.org/schema/beans + http://www.springframework.org/schema/beans/spring-beans-3.0.xsd + http://www.springframework.org/schema/context + http://www.springframework.org/schema/context/spring-context-3.0.xsd"> + + <bean id="cardinalityEstimator" + class="uk.org.ogsadai.dqp.lqp.cardinality.CardinalityEstimator"/> + + <bean id="partitioner" + class="uk.org.ogsadai.dqp.lqp.optimiser.partitioner.PartitioningOptimiser"/> + + <bean id="tableScanImplosion" + class="uk.org.ogsadai.dqp.lqp.optimiser.implosion.TableScanImplosionOptimiser"> + <property name="cardinalityEstimator" 
ref="cardinalityEstimator"/> + </bean> + + <!-- ************************** + Compiler Configuration + ************************** --> + + <bean id="compilerConfiguration" + class="uk.org.ogsadai.resource.dataresource.dqp.SimpleCompilerConfiguration"> + + <!-- + This section specifies the chain of optimisers that will rewrite the + query plan. Optimisers will be chained in the specification order. + --> + <property name="optimisationChain"> + <list> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.decorrelate.DecorrelationOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.select.SelectPushDownOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.rename.RenamePullUpOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.project.ProjectPullUpOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.project.groupby.InsertProjectAfterGroupByOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.project.pushdown.ProjectPushDownOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.project.redundant.RemoveRedundantProjectOptimiser" /> + <bean class="uk.org.ogsadai.dqp.lqp.cardinality.CardinalityEstimatorOptimiser"> + <property name="cardinalityEstimator" ref="cardinalityEstimator"/> + </bean> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.JoinOrderingOptimiser" > + <property name="partitioner" ref="partitioner"/> + <property name="cardinalityEstimator" ref="cardinalityEstimator"/> + </bean> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.partitioner.PartitioningOptimiser" /> + <ref bean="tableScanImplosion"/> + <!-- + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.implosion.ExtendedTableScanImplosionOptimiser"> + <property name="partitioner" ref="partitioner"/> + <property name="cardinalityEstimator" ref="cardinalityEstimator"/> + <property name="tableScanImplosionOptimiser" ref="tableScanImplosion"/> + </bean> + --> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.JoinImplementationOptimiser"> + <property 
name="joinImplementations"> + <list> + <!-- + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.EquiJoinBatch"> + <property name="batchSize" value="1000" /> + </bean> + --> + <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.InMemoryJoin"/> +<!-- <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.DerbyStreamingJoin"/> --> +<!-- <bean class="uk.org.ogsadai.dqp.lqp.optimiser.join.DerbyJoin"/> --> + </list> + </property> + </bean> + </list> + </property> + + <!-- + Each operator appearing in the logical query plan must have an + associated builder class. This section defines mapping between an + operator and a builder. + --> + <property name="operatorBuilders"> + <map> + <entry key="DUPLICATE_ELIMINATION"> + <map> <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.DuplicateEliminationBuilder" /> </entry> </map> + </entry> + <entry key="DIFFERENCE"> + <map> <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.DifferenceBuilder"/> </entry> </map> + </entry> + <entry key="PULL_EXCHANGE_CONSUMER"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.PullExchangeConsumerBuilder"/></entry></map> + </entry> + <entry key="PULL_EXCHANGE_PRODUCER"> + <map> <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.PullExchangeProducerBuilder"/></entry></map> + </entry> + <entry key="PUSH_EXCHANGE_CONSUMER"> + <map> <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.PushExchangeConsumerBuilder"/></entry></map> + </entry> + <entry key="PUSH_EXCHANGE_PRODUCER"> + <map> <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.PushExchangeProducerBuilder"/></entry></map> + </entry> + <entry key="GROUP_BY"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.GroupByBuilder"/></entry></map> + </entry> + <entry key="INNER_THETA_JOIN"> + <map> + <entry key="DEFAULT"> + <bean class="uk.org.ogsadai.dqp.execute.workflow.ProductSelectBuilder"> + <!-- 
<property name="resourceID" value="DerbyResource"/> --> + </bean> + </entry> + <entry key="PRIMARY_EXPRESSION"> <bean class="uk.org.ogsadai.dqp.execute.workflow.ThetaJoinBuilder"/> </entry> + <entry key="DERBY_STREAMING_JOIN"> + <bean class="uk.org.ogsadai.dqp.execute.workflow.DerbyStreamingJoinBuilder"> + <property name="resourceID" value="DerbyResource"/> + </bean> + </entry> + <entry key="DERBY_JOIN"> + <bean class="uk.org.ogsadai.dqp.execute.workflow.DerbyJoinBuilder"> + <property name="resourceID" value="DerbyResource"/> + </bean> + </entry> + </map> + </entry> + <entry key="LEFT_OUTER_JOIN"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.LeftOuterJoinBuilder"/></entry></map> + </entry> + <entry key="NIL"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.NilBuilder"/></entry></map> + </entry> + <entry key="ONE_ROW_ONLY"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.OneRowOnlyBuilder"/></entry></map> + </entry> + <entry key="PRODUCT"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.ProductBuilder"/></entry></map> + </entry> + <entry key="PROJECT"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.ProjectBuilder"/></entry></map> + </entry> + <!-- + <entry key="QUERY_APPLY"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.QueryApplyBuilder"/></entry></map> + </entry> + --> + <entry key="RENAME"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.RenameBuilder"/></entry></map> + </entry> + <entry key="SCALAR_GROUP_BY"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.ScalarGroupByBuilder"/></entry></map> + </entry> + <entry key="SELECT"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.SelectBuilder"/></entry></map> + </entry> + <entry key="SEMI_JOIN"> + <map><entry key="DEFAULT"> + <bean 
class="uk.org.ogsadai.dqp.execute.workflow.SemiJoinBuilder"> + <property name="resourceID" value="DerbyResource"/> + </bean> + </entry></map> + </entry> + <entry key="ANTI_SEMI_JOIN"> + <map><entry key="DEFAULT"> + <bean class="uk.org.ogsadai.dqp.execute.workflow.AntiJoinBuilder"> + <property name="resourceID" value="DerbyResource"/> + </bean> + </entry></map> + </entry> + <entry key="SORT"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.SortBuilder"/></entry></map> + </entry> + <entry key="TABLE_SCAN"> + <map> + <entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.TableScanBuilder"/></entry> + <entry key="filteredTableScan"> + <bean class="uk.org.ogsadai.dqp.execute.workflow.BatchTableScanBuilder"> + <property name="tableScanImplosionOptimiser" ref="tableScanImplosion"/> + </bean> + </entry> + </map> + </entry> + <!-- + <entry key="FILTERED_TABLE_SCAN"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.FilteredTableScanBuilder"/></entry></map> + </entry> + --> + <entry key="UNION"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.UnionBuilder"/></entry></map> + </entry> + <entry key="SCAN_BIND_APPLY"> + <map><entry key="DEFAULT"> <bean class="uk.org.ogsadai.dqp.execute.workflow.ScanBindApplyBuilder"/></entry></map> + </entry> + </map> + </property> + + <!-- + This object builds a logical query plan from a user query. + --> + <property name="queryPlanBuilder"> + <bean class="uk.org.ogsadai.dqp.execute.SQLQueryPlanBuilder"/> + </property> + + <!-- + Table scan query factory element specifies a class implementing the + TableScanQueryFactory interface. Objects of this class are used to + create the TableScanQuery objects that respresent a query within a + table scan operator. Supporting in plug-in factory decouples the + concrete type of the TableScanQuery object from the table scan operator. 
+ This allows alternative (possibly more sophisticated) implosion + optimisers to be written that require more functionality from the + concrete implementation of the TableScanQuery object. + --> + <property name="tableScanQueryFactory"> + <bean class="uk.org.ogsadai.dqp.lqp.operators.SimpleSelectProjectJoinTableScanQueryFactory" /> + </property> + + <!-- + Cardinality estimator property specifies an object implementing the + CardinalityEstimator interface. It may be used by optimisers to get + estimates of cardinalities of intermediate relations in the query plan. + --> + <!-- + <property name="cardinalityEstimator" ref="cardinalityEstimator"/> + --> + + <!-- + Function operators specify mapping of relation valued functions + (functions that appear in the FROM_LIST of the query statement) to + operator functions. + --> + <property name="functionOperators"> + <map value-type="java.lang.Class"> + <entry key="outerUnion"><value>uk.org.ogsadai.dqp.lqp.operators.extra.OuterUnionOperator</value></entry> + </map> + </property> + + <!-- + Coordinator extensions are used to perform some processing after + partitions are created. Extension code can be executed before and/or + after the workflow is submitted for execution by the coordinator. This + extensibility can be used for example to gather post execution + monitoring data. + --> + <!-- + <property name="coordinatorExtensions"> + <list> + <bean class="" /> + <bean class="" /> + </list> + </property> + --> + + </bean> <!-- Compiler configuration --> + +</beans> Added: test-framework/trunk/jenkins/dqp-server-tests/README.txt =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/README.txt (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/README.txt 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,23 @@ +DQP Jersey Server tests +======================= + +1. 
Set up MySQL, create database 'dqp_test' and load the DQP test data from + ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/main/data/dqp_test.sql.zip + for example: + mysql -u yourusername -p yourpasswd dqp_test < dqp_test.sql +2. Create a new job on Jenkins. (NOTE: The project name must not contain spaces) +3. Copy config.xml to the job directory. +4. Point your browser to the Jenkins webapp and reload configuration from disk. +5. Configure the job: + In 'Build' modify build step 'Invoke Ant' by clicking on 'Advanced...' + Replace '$WORKSPACE' with the correct location of OGSA-DAI dependencies and + packages directories if required. +6. Copy the build script and configure scripts to another location on the server + and edit the configure scripts: + - dqpServer.configure + - ogsadaiOne.configure + - ogsadaiTwo.configure + Make sure that the MySQL JDBC connection is correct (host, port, database name), + and provide username and password, for example + Login permit Resource2 ANY yourusername "yourpasswd" +7. Run 'Build Now' or adjust the Build Triggers. 
\ No newline at end of file Added: test-framework/trunk/jenkins/dqp-server-tests/ant.properties =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/ant.properties (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/ant.properties 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,4 @@ +jdbc.driver.path=${env.WORKSPACE}/third-party/dependencies/mysql/mysql-connector/5.0.4/mysql-connector-java-5.0.4-bin.jar +derby.driver.path=${env.WORKSPACE}/third-party/dependencies/org/apache/derby/10.8.2.2/derby-10.8.2.2.jar +test.tomcat.version=7.0.22 +dqp.server.test.properties=dqp.server.test.properties Added: test-framework/trunk/jenkins/dqp-server-tests/build.xml =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/build.xml (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/build.xml 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,266 @@ +<?xml version="1.0"?> + +<!-- Copyright (c) The University of Edinburgh, 2012. --> +<!-- + LICENCE-START + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ LICENCE-END +--> +<project name="dqp-jersey-server-test-suite"> + + <!-- Environment variables --> + <property environment="env"/> + <!-- Dependencies --> + <property name="dependencies.dir" value="${env.OGSA_DAI_DEPENDENCIES}"/> + <!-- Packages --> + <property name="packages.dir" value="${env.OGSA_DAI_PACKAGES}"/> + <!-- SVN trunk --> + <property name="trunk.dir" value="${env.WORKSPACE}/ogsa-dai/trunk"/> + <!-- SVN sandbox --> + <property name="sandbox.dir" value="${env.WORKSPACE}/sandbox"/> + <!-- Tomcat installation directory --> + <property name="tmp.test.dir" value="${env.WORKSPACE}/testing"/> + <!-- Load ant.properties --> + <property file="${basedir}/ant.properties"/> + + <target name="clean"> + <mkdir dir="${tmp.test.dir}"/> + <delete> + <fileset dir="${tmp.test.dir}"> + <include name="**/*"/> + </fileset> + </delete> + </target> + + <target name="install"> + <delete failonerror="false" + dir="${tmp.test.dir}/tomcat-${test.tomcat.port}"/> + <mkdir dir="${tmp.test.dir}/tomcat-${test.tomcat.port}"/> + <unzip dest="${tmp.test.dir}/tomcat-${test.tomcat.port}"> + <fileset dir="${packages.dir}/tomcat/${test.tomcat.version}"> + <include name="apache-tomcat-*.zip"/> + </fileset> + </unzip> + <!-- Changing various ports --> + <replace file="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/conf/server.xml" + token="8080" + value="${test.tomcat.port}"> + </replace> + <replace file="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/conf/server.xml" + token="8005" + value="${test.tomcat.shutdown.port}"> + </replace> + <replace file="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/conf/server.xml" + token="8009" + value="${test.tomcat.other.port}"> + </replace> + + <!-- Installing OGSA-DAI release, assumed to be built already --> + <echo message="Installing OGSA-DAI from ${release.bin.dir}"/> + <copy 
todir="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps"> + <fileset dir="${release.bin.dir}"> + <include name="dai.war"/> + <include name="dai/**/*"/> + </fileset> + </copy> + + <echo message="Configuring OGSA-DAI release"/> + <!-- Set the host service URL - required by the Jersey version --> + <replace + file="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps/dai/WEB-INF/classes/ogsadai-context.xml" + token="</beans>" + value="<bean id="uk.org.ogsadai.WEB_SERVER_URL" class="java.lang.String"> <constructor-arg value="http://localhost:${test.tomcat.port}/dai"/> </bean> </beans>" /> + <!-- Run the configure script for this instance --> + <ant dir="${release.bin.dir}" target="configure"> + <property name="tomcat.dir" value="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/"/> + <property name="config.file" value="${ogsadai.config.file}"/> + </ant> + <!-- Copy the JDBC driver to the webapp --> + <copy file="${jdbc.driver.path}" + todir="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps/dai/WEB-INF/lib" /> + <copy file="${derby.driver.path}" + todir="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps/dai/WEB-INF/lib" /> + <!-- Copy DQP server sandbox jar to the webapp --> + <copy todir="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps/dai/WEB-INF/lib" + overwrite="true"> + <fileset dir="${sandbox.dir}/dqp/server/build/lib"> + <exclude name="*Tests*"/> + </fileset> + </copy> + <!-- Copy DQP compiler configuration to the webapp --> + <copy file="${basedir}/DQPCompilerConfiguration.xml" + todir="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}/webapps/dai/WEB-INF/etc/dai/dqp/" + overwrite="true"/> + + <!-- Start Tomcat and wait for index page to appear --> + <antcall target="startTomcat"> + <param name="apache.tomcat7" + 
value="${tmp.test.dir}/tomcat-${test.tomcat.port}/apache-tomcat-${test.tomcat.version}"/> + </antcall> + <echo message="Waiting for OGSA-DAI index page..."/> + <waitfor> + <http url="http://localhost:${test.tomcat.port}/dai/dai-index.jsp"/> + </waitfor> + + </target> + + <target name="startTomcat"> + <chmod perm="u+rwx" dir="${apache.tomcat7}/bin" includes="*.sh"/> + <exec dir="${apache.tomcat7}" executable="bin/startup.sh"/> + </target> + + <target name="stopTomcat"> + <exec dir="${apache.tomcat7}" executable="bin/shutdown.sh"/> + </target> + + <target name="buildModule"> + <ant dir="${trunk.dir}/${module.name}" + target="clean" /> + <ant dir="${trunk.dir}/${module.name}" + target="jar" /> + <ant dir="${trunk.dir}/${module.name}" + target="jarUnitTests" /> + <ant dir="${trunk.dir}/${module.name}" + target="jarExtTests" /> + </target> + + <target name="buildDQPsandbox"> + <ant dir="${sandbox.dir}/${module.name}" + target="clean" /> + <ant dir="${sandbox.dir}/${module.name}" + target="jar" /> + <ant dir="${sandbox.dir}/${module.name}" + target="jarUnitTests" /> + <ant dir="${sandbox.dir}/${module.name}" + target="jarExtTests" /> + </target> + + <target name="buildModules" description="Builds all modules required for tests"> + <antcall target="buildModule"> + <param name="module.name" value="core/common/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="core/clientserver/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="core/client/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="core/server/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="data"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="extensions/basic/client/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="extensions/basic/server/"/> + </antcall> + <antcall target="buildModule"> + <param 
name="module.name" value="extensions/relational/client/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="extensions/relational/server/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="extensions/dqp/bindings/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="extensions/dqp/client/"/> + </antcall> + <antcall target="buildDQPsandbox"> + <param name="module.name" value="dqp/server/"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="presentation/jersey/common"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="presentation/jersey/client"/> + </antcall> + </target> + + <!-- + Installs three Tomcats with OGSA-DAI and configures them. + --> + <target name="installAll" + depends="clean" + description="Installs test OGSA-DAI instances on Tomcat and starts them up"> + + <!-- find the path of the binary release build --> + <first id="bindir"> + <dirset dir="${trunk.dir}/release-scripts/ogsa-dai/jersey/build/" + includes="*-src/build/*-bin/" /> + </first> + + <property name="release.bin.dir" value="${toString:bindir}" /> + <antcall target="install"> + <param name="test.tomcat.port" value="12000"/> + <param name="test.tomcat.shutdown.port" value="12001"/> + <param name="test.tomcat.other.port" value="12002"/> + <param name="ogsadai.config.file" value="${basedir}/dqpServer.configure"/> + </antcall> + <antcall target="install"> + <param name="test.tomcat.port" value="12010"/> + <param name="test.tomcat.shutdown.port" value="12011"/> + <param name="test.tomcat.other.port" value="12012"/> + <param name="ogsadai.config.file" value="${basedir}/ogsadaiOne.configure"/> + </antcall> + <antcall target="install"> + <param name="test.tomcat.port" value="12020"/> + <param name="test.tomcat.shutdown.port" value="12021"/> + <param name="test.tomcat.other.port" value="12022"/> + <param name="ogsadai.config.file" 
value="${basedir}/ogsadaiTwo.configure"/> + </antcall> + </target> + + <target name="runTests" description="Runs the DQP system tests"> + <copy file="dqp.server.test.properties" + tofile="${env.WORKSPACE}/tmp/ogsadai.test.properties"/> + <antcall target="buildModule"> + <param name="module.name" value="server-tests/common"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="server-tests/core"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="server-tests/presentation/jersey"/> + </antcall> + <antcall target="buildModule"> + <param name="module.name" value="server-tests/presentation/extensions/dqp"/> + </antcall> + <ant dir="${trunk.dir}/server-tests/presentation/extensions/dqp" + target="runExtTests"> + <property name="ogsadai.test.properties" + value="${env.WORKSPACE}/tmp/ogsadai.test.properties"/> + <property name="${test.report.dir}" + value="${env.WORKSPACE}/test-results/"/> + </ant> + </target> + + <target name="stopAll" description="Stops all Tomcat instances"> + <antcall target="stopTomcat"> + <param name="apache.tomcat7" + value="${tmp.test.dir}/tomcat-12000/apache-tomcat-${test.tomcat.version}"/> + </antcall> + <antcall target="stopTomcat"> + <param name="apache.tomcat7" + value="${tmp.test.dir}/tomcat-12010/apache-tomcat-${test.tomcat.version}"/> + </antcall> + <antcall target="stopTomcat"> + <param name="apache.tomcat7" + value="${tmp.test.dir}/tomcat-12020/apache-tomcat-${test.tomcat.version}"/> + </antcall> + </target> + +</project> \ No newline at end of file Added: test-framework/trunk/jenkins/dqp-server-tests/dqp.server.test.properties =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/dqp.server.test.properties (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/dqp.server.test.properties 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,12 @@ +jdbc.connection.url=jdbc:mysql://localhost:3306/dqp_test 
+jdbc.driver.class=org.gjt.mm.mysql.Driver +jdbc.user.name=root +jdbc.password= +jdbc.column.type.mapper.class=uk.org.ogsadai.resource.dataresource.jdbc.MySQLColumnTypeMapper +server.url=http://localhost:12000/dai/services +server.drer.id=DataRequestExecutionResource +server.proxy.factory=uk.org.ogsadai.test.server.jersey.JerseyServerProxyFactory +server.dqp.resource.id=DQPResource +server.dqp.resource.template.id=uk.org.ogsadai.DQP_RESOURCE_TEMPLATE +server.dqp.jdbc.resource.id=MySQLResource +server.dqp.jdbc.table=student Added: test-framework/trunk/jenkins/dqp-server-tests/dqpServer.configure =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/dqpServer.configure (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/dqpServer.configure 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,38 @@ +DQP deploy DQPResource +DQP addEvaluationNode DQPResource dqpServer http://localhost:12000/dai/services DataRequestExecutionResource dataSources dataSinks true uk.org.ogsadai.dqp.presentation.jersey.JerseyEvaluationNode +DQP addEvaluationNode DQPResource ogsadaiOne http://localhost:12010/dai/services DataRequestExecutionResource dataSources dataSinks false uk.org.ogsadai.dqp.presentation.jersey.JerseyEvaluationNode +DQP addEvaluationNode DQPResource ogsadaiTwo http://localhost:12020/dai/services DataRequestExecutionResource dataSources dataSinks false uk.org.ogsadai.dqp.presentation.jersey.JerseyEvaluationNode +DQP addDataNode DQPResource dqpServer MySQLResource +DQP addDataNode DQPResource ogsadaiOne Resource2 +DQP addDataNode DQPResource ogsadaiTwo Resource3 +DQP addDataNode DQPResource ogsadaiTwo Resource4 +Resource addActivity DataRequestExecutionResource uk.org.ogsadai.CreateDQPResource uk.org.ogsadai.CreateDQPResource +Activity add uk.org.ogsadai.CreateDQPResource uk.org.ogsadai.activity.dqp.CreateDQPResourceActivity +Activity addConfig uk.org.ogsadai.CreateDQPResource dqp.config.dir dqp +Activity 
addConfig uk.org.ogsadai.CreateDQPResource dai.template.id uk.org.ogsadai.DQP_RESOURCE_TEMPLATE +Activity addConfig uk.org.ogsadai.CreateDQPResource dqp.context.template DQPContextTemplate.xml +Activity addConfig uk.org.ogsadai.CreateDQPResource dqp.evaluation.node.factory uk.org.ogsadai.dqp.presentation.jersey.JerseyEvaluationNodeFactory + +JDBC deploy MySQLResource jdbc:mysql://localhost:3306/dqp_test org.gjt.mm.mysql.Driver +Login permit MySQLResource ANY root "" +Resource addConfig MySQLResource dai.column.type.mapper.class uk.org.ogsadai.resource.dataresource.jdbc.MySQLColumnTypeMapper + +JDBC deploy DerbyResource jdbc:derby:derbydb;create=true org.apache.derby.jdbc.EmbeddedDriver +Login permit DerbyResource ANY "" "" +Activity add uk.org.ogsadai.TupleJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleJoin uk.org.ogsadai.TupleJoinDerby +Activity add uk.org.ogsadai.TupleSemiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleSemiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbySemiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleSemiJoin uk.org.ogsadai.TupleSemiJoinDerby +Activity add uk.org.ogsadai.TupleAntiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleAntiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyAntiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleAntiJoin uk.org.ogsadai.TupleAntiJoinDerby +Activity add uk.org.ogsadai.TupleProductDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleProductDerby join.implementation uk.org.ogsadai.tuple.join.DerbyProduct +Resource addActivity DerbyResource uk.org.ogsadai.TupleProduct uk.org.ogsadai.TupleProductDerby +Activity add 
uk.org.ogsadai.TupleLeftOuterJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleLeftOuterJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyOuterJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleLeftOuterJoin uk.org.ogsadai.TupleLeftOuterJoinDerby +Activity add uk.org.ogsadai.TupleSortDerby uk.org.ogsadai.activity.derby.DerbySortActivity +Resource addActivity DerbyResource uk.org.ogsadai.TupleSort uk.org.ogsadai.TupleSortDerby Added: test-framework/trunk/jenkins/dqp-server-tests/ogsadaiOne.configure =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/ogsadaiOne.configure (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/ogsadaiOne.configure 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,23 @@ +JDBC deploy Resource2 jdbc:mysql://localhost:3306/dqp_test org.gjt.mm.mysql.Driver +Login permit Resource2 ANY root "" +Resource addConfig Resource2 dai.column.type.mapper.class uk.org.ogsadai.resource.dataresource.jdbc.MySQLColumnTypeMapper + +JDBC deploy DerbyResource jdbc:derby:derbydb;create=true org.apache.derby.jdbc.EmbeddedDriver +Login permit DerbyResource ANY "" "" +Activity add uk.org.ogsadai.TupleJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleJoin uk.org.ogsadai.TupleJoinDerby +Activity add uk.org.ogsadai.TupleSemiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleSemiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbySemiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleSemiJoin uk.org.ogsadai.TupleSemiJoinDerby +Activity add uk.org.ogsadai.TupleAntiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig 
uk.org.ogsadai.TupleAntiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyAntiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleAntiJoin uk.org.ogsadai.TupleAntiJoinDerby +Activity add uk.org.ogsadai.TupleProductDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleProductDerby join.implementation uk.org.ogsadai.tuple.join.DerbyProduct +Resource addActivity DerbyResource uk.org.ogsadai.TupleProduct uk.org.ogsadai.TupleProductDerby +Activity add uk.org.ogsadai.TupleLeftOuterJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleLeftOuterJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyOuterJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleLeftOuterJoin uk.org.ogsadai.TupleLeftOuterJoinDerby +Activity add uk.org.ogsadai.TupleSortDerby uk.org.ogsadai.activity.derby.DerbySortActivity +Resource addActivity DerbyResource uk.org.ogsadai.TupleSort uk.org.ogsadai.TupleSortDerby Added: test-framework/trunk/jenkins/dqp-server-tests/ogsadaiTwo.configure =================================================================== --- test-framework/trunk/jenkins/dqp-server-tests/ogsadaiTwo.configure (rev 0) +++ test-framework/trunk/jenkins/dqp-server-tests/ogsadaiTwo.configure 2012-11-15 14:37:48 UTC (rev 2159) @@ -0,0 +1,27 @@ +JDBC deploy Resource3 jdbc:mysql://localhost:3306/dqp_test org.gjt.mm.mysql.Driver +Login permit Resource3 ANY root "" +Resource addConfig Resource3 dai.column.type.mapper.class uk.org.ogsadai.resource.dataresource.jdbc.MySQLColumnTypeMapper + +JDBC deploy Resource4 jdbc:mysql://localhost:3306/dqp_test org.gjt.mm.mysql.Driver +Login permit Resource4 ANY root "" +Resource addConfig Resource4 dai.column.type.mapper.class uk.org.ogsadai.resource.dataresource.jdbc.MySQLColumnTypeMapper + +JDBC deploy DerbyResource jdbc:derby:derbydb;create=true org.apache.derby.jdbc.EmbeddedDriver +Login permit DerbyResource ANY "" "" 
+Activity add uk.org.ogsadai.TupleJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleJoin uk.org.ogsadai.TupleJoinDerby +Activity add uk.org.ogsadai.TupleSemiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleSemiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbySemiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleSemiJoin uk.org.ogsadai.TupleSemiJoinDerby +Activity add uk.org.ogsadai.TupleAntiJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleAntiJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyAntiJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleAntiJoin uk.org.ogsadai.TupleAntiJoinDerby +Activity add uk.org.ogsadai.TupleProductDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleProductDerby join.implementation uk.org.ogsadai.tuple.join.DerbyProduct +Resource addActivity DerbyResource uk.org.ogsadai.TupleProduct uk.org.ogsadai.TupleProductDerby +Activity add uk.org.ogsadai.TupleLeftOuterJoinDerby uk.org.ogsadai.activity.derby.DerbyStreamingJoinActivity +Activity addConfig uk.org.ogsadai.TupleLeftOuterJoinDerby join.implementation uk.org.ogsadai.tuple.join.DerbyOuterJoin +Resource addActivity DerbyResource uk.org.ogsadai.TupleLeftOuterJoin uk.org.ogsadai.TupleLeftOuterJoinDerby +Activity add uk.org.ogsadai.TupleSortDerby uk.org.ogsadai.activity.derby.DerbySortActivity +Resource addActivity DerbyResource uk.org.ogsadai.TupleSort uk.org.ogsadai.TupleSortDerby This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-15 12:07:48
|
Revision: 2158 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2158&view=rev Author: amykrause Date: 2012-11-15 12:07:42 +0000 (Thu, 15 Nov 2012) Log Message: ----------- Fixed implementation of Derby join. Modified Paths: -------------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyJoinTest.java Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyJoinTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyJoinTest.java 2012-11-15 11:58:36 UTC (rev 2157) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyJoinTest.java 2012-11-15 12:07:42 UTC (rev 2158) @@ -85,6 +85,24 @@ join.close(); } + public void testSameColumn() throws Exception + { + // Configure the join + DerbyJoin join = new DerbyJoin(); + join.setResource(getResource()); + join.storeRightTuples(false); + join.setCondition(getExpression("r1.id < r2.id AND r2.id > -100")); + configureMetadata(join); + + // Store some tuples + join.storeTuples(getStoredTuples()); + + // Now test lots of Tuples + Iterable<Tuple> tuples = join.join(new SimpleTuple(5, 20.0)); + checkResults(tuples, 0, 4); + join.close(); + } + public void testLessOrEqualThan() throws Exception { // Configure the join This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-15 11:58:47
|
Revision: 2157 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2157&view=rev Author: amykrause Date: 2012-11-15 11:58:36 +0000 (Thu, 15 Nov 2012) Log Message: ----------- Fixed implementation of Derby join. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/AntiJoinBuilder.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyAntiJoin.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyJoin.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/AntiJoinBuilder.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/AntiJoinBuilder.java 2012-11-14 16:08:21 UTC (rev 2156) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/AntiJoinBuilder.java 2012-11-15 11:58:36 UTC (rev 2157) @@ -68,11 +68,13 @@ if (mResourceID != null) join.setResourceID(mResourceID); join.createInput("data1"); join.createInput("data2"); + join.createInput("readFirst"); join.createInput("condition"); join.createOutput("result", GenericActivity.LIMITED_VALIDATION); join.connectInput("data1", outputLeft); join.connectInput("data2", outputRight); join.addInput("condition", sql); + join.addInput("readFirst", "data2"); builder.add(join); return join.getOutput("result"); Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyAntiJoin.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyAntiJoin.java 2012-11-14 16:08:21 UTC (rev 2156) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyAntiJoin.java 2012-11-15 11:58:36 UTC (rev 2157) @@ -24,11 +24,18 @@ public class DerbyAntiJoin extends DerbyJoin { - + @Override public TupleMetadata getJoinMetadata() { - return mRight; + if (mStoreLeft) + { + return mRight; + } + else + { + return mLeft; + } } @Override Modified: 
sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyJoin.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyJoin.java 2012-11-14 16:08:21 UTC (rev 2156) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyJoin.java 2012-11-15 11:58:36 UTC (rev 2157) @@ -33,6 +33,10 @@ import uk.org.ogsadai.activity.io.TupleListIterator; import uk.org.ogsadai.activity.sql.SQLUtilities; import uk.org.ogsadai.common.msgs.DAILogger; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.dqp.lqp.AttributeImpl; +import uk.org.ogsadai.dqp.lqp.RenameMap; +import uk.org.ogsadai.dqp.lqp.SimpleRenameMap; import uk.org.ogsadai.expression.ExpressionUtils; import uk.org.ogsadai.expression.IncomparableTypesException; import uk.org.ogsadai.resource.dataresource.jdbc.JDBCConnectionProvider; @@ -240,37 +244,43 @@ private PreparedStatement createCandidateStatement(TupleMetadata metadata) throws SQLException { - String joinCondition = ExpressionUtils.generateSQL(mCondition); - mColumns = new ArrayList<Integer>(); - // replace all columns from the input metadata with '?' + List<Attribute> original = new ArrayList<Attribute>(metadata.getColumnCount()); + List<Attribute> renamed = new ArrayList<Attribute>(metadata.getColumnCount()); for (int i=0; i<metadata.getColumnCount(); i++) { ColumnMetadata column = metadata.getColumnMetadata(i); - String columnName = column.getName(); - if (column.getTableName() != null && !column.getTableName().isEmpty()) - { - columnName = column.getTableName() + "." 
+ columnName; - } - if (joinCondition.contains(columnName)) - { - mColumns.add(i); - joinCondition = joinCondition.replace(columnName, "?"); - } + original.add(new AttributeImpl(column.getName(), column.getTableName())); + renamed.add(new AttributeImpl("?{" + i + "}")); } for (int i=0; i<mStoredMetadata.getColumnCount(); i++) { ColumnMetadata column = mStoredMetadata.getColumnMetadata(i); - String columnName = column.getName(); - String replace = column.getName(); if (column.getTableName() != null && !column.getTableName().isEmpty()) { - columnName = column.getTableName() + "." + columnName; - replace = column.getTableName() + "_" + replace; + original.add(new AttributeImpl(column.getName(), column.getTableName())); + renamed.add(new AttributeImpl(column.getTableName() + "_" + column.getName())); } - joinCondition = joinCondition.replace(columnName, replace); } + RenameMap renameMap = new SimpleRenameMap(original, renamed); + ExpressionUtils.renameUsedAttributes(mCondition, renameMap); + String joinCondition = ExpressionUtils.generateSQL(mCondition); + StringBuilder preparedCondition = new StringBuilder(); + mColumns = new ArrayList<Integer>(); + int beginIndex = joinCondition.indexOf("?{", 0) + "?{".length(); + int endIndex = -1; + while (beginIndex >= 2) + { + preparedCondition.append(joinCondition.substring(endIndex+1, beginIndex-1)); + endIndex = joinCondition.indexOf("}", beginIndex); + int columnIndex = + Integer.parseInt( + joinCondition.substring(beginIndex, endIndex)); + mColumns.add(columnIndex); + beginIndex = joinCondition.indexOf("?{", endIndex) + "?{".length(); + } + preparedCondition.append(joinCondition.substring(endIndex+1, joinCondition.length())); String sql = "SELECT * FROM " + mTable.getName() + - " WHERE " + joinCondition; + " WHERE " + preparedCondition; LOG.debug("Created candidate select statement: " + sql); return mConnection.prepareStatement(sql); } This was sent by the SourceForge.net collaborative development platform, the world's 
largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 16:08:32
|
Revision: 2156 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2156&view=rev Author: amykrause Date: 2012-11-14 16:08:21 +0000 (Wed, 14 Nov 2012) Log Message: ----------- Fixed bug in product. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyProduct.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyProduct.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyProduct.java 2012-11-14 15:45:26 UTC (rev 2155) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/tuple/join/DerbyProduct.java 2012-11-14 16:08:21 UTC (rev 2156) @@ -19,9 +19,17 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.util.Set; import uk.org.ogsadai.activity.ActivityUserException; +import uk.org.ogsadai.dqp.lqp.Attribute; +import uk.org.ogsadai.expression.Expression; +import uk.org.ogsadai.expression.ExpressionEvaluationException; +import uk.org.ogsadai.expression.ExpressionVisitor; +import uk.org.ogsadai.tuple.ColumnNotFoundException; import uk.org.ogsadai.tuple.Tuple; +import uk.org.ogsadai.tuple.TupleMetadata; +import uk.org.ogsadai.tuple.TypeMismatchException; /** * Stores data input of the product in a database and retrieves it for each @@ -31,6 +39,39 @@ */ public class DerbyProduct extends DerbyJoin { + public DerbyProduct() + { + mCondition = new Expression() { + + @Override + public Boolean evaluate(Tuple tuple) throws ExpressionEvaluationException + { + return true; + } + + @Override + public void configure(TupleMetadata metadata, + Set<Attribute> correlatedAttributes) + throws ColumnNotFoundException, TypeMismatchException + { + // ignore + } + + @Override + public void configure(TupleMetadata metadata) + throws ColumnNotFoundException, TypeMismatchException + { + // ignore + } + + @Override + public void accept(ExpressionVisitor visitor) + { + // ignore + } + }; + } + 
@Override protected ResultSetIterator getCandidateMatches(Tuple tuple) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 15:45:38
|
Revision: 2155 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2155&view=rev Author: amykrause Date: 2012-11-14 15:45:26 +0000 (Wed, 14 Nov 2012) Log Message: ----------- Tidying up. Added Paths: ----------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyProductTest.java Added: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyProductTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyProductTest.java (rev 0) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyProductTest.java 2012-11-14 15:45:26 UTC (rev 2155) @@ -0,0 +1,89 @@ +// Copyright (c) The University of Edinburgh, 2012. +// +// LICENCE-START +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// LICENCE-END + +package uk.org.ogsadai.tuple.join; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; + +import junit.framework.TestCase; +import uk.org.ogsadai.tuple.SimpleTuple; +import uk.org.ogsadai.tuple.Tuple; + +/** + * Test class for DerbySemiJoin. + * + * @author The OGSA-DAI Project Team + */ +public class DerbyProductTest extends TestCase +{ + /** Copyright notice. 
*/ + private static final String COPYRIGHT_NOTICE = + "Copyright (c) The University of Edinburgh, 2012"; + + public DerbyProductTest(String name) + { + super(name); + } + + public void testProduct() throws Exception + { + DerbyProduct join = new DerbyProduct(); + join.setResource(DerbyJoinTest.getResource()); + join.storeRightTuples(false); + DerbyJoinTest.configureMetadata(join); + + List<Tuple> stored = getStoredTuples(); + join.storeTuples(stored); + + Tuple expected = new SimpleTuple(5, 20.0); + Iterator<Tuple> tuples = join.join(expected).iterator(); + for (Tuple tuple : stored) + { + assertTrue(tuples.hasNext()); + Tuple actual = tuples.next(); + int columnCount = tuple.getColumnCount(); + for (int i=0; i<columnCount; i++) + { + assertEquals(tuple.getObject(i), actual.getObject(i)); + } + for (int i=0; i<expected.getColumnCount(); i++) + { + assertEquals(expected.getObject(i), actual.getObject(i+columnCount)); + } + } + assertFalse(tuples.hasNext()); + join.close(); + } + + private List<Tuple> getStoredTuples() + { + int[] ids = {7, 0, 2, 1, 4, 3, 5, 6, 8, 9}; + double[] values = + {0.1, 2.5, 45.0, -23.3, 7.66, 7.88, 3.5, 6.8, 9.2, 123.0}; + + List<Tuple> result = new LinkedList<Tuple>(); + + for (int i=0; i<ids.length; ++i) + { + result.add(new SimpleTuple(ids[i],values[i])); + } + + return result; + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 15:44:39
|
Revision: 2154 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2154&view=rev Author: amykrause Date: 2012-11-14 15:44:29 +0000 (Wed, 14 Nov 2012) Log Message: ----------- Tidying up. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/activity/derby/DerbyStreamingJoinActivity.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/BatchTableScanBuilder.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/presentation/common/XMLDQPResourceConfiguration.java Removed Paths: ------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/ProductJoin.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/activity/derby/DerbyStreamingJoinActivity.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/activity/derby/DerbyStreamingJoinActivity.java 2012-11-14 14:09:12 UTC (rev 2153) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/activity/derby/DerbyStreamingJoinActivity.java 2012-11-14 15:44:29 UTC (rev 2154) @@ -31,6 +31,7 @@ import uk.org.ogsadai.activity.io.TupleListActivityInput; import uk.org.ogsadai.activity.io.TupleListIterator; import uk.org.ogsadai.activity.io.TypedActivityInput; +import uk.org.ogsadai.activity.io.TypedOptionalActivityInput; import uk.org.ogsadai.common.msgs.DAILogger; import uk.org.ogsadai.config.Key; import uk.org.ogsadai.config.KeyValueProperties; @@ -74,7 +75,8 @@ return new ActivityInput[] { new TupleListActivityInput(INPUT_DATA1), new TupleListActivityInput(INPUT_DATA2), - new TypedActivityInput(INPUT_CONDITION, String.class), + // if join implementation is Product then there's no condition + new TypedOptionalActivityInput(INPUT_CONDITION, String.class), new TypedActivityInput(INPUT_READ_FIRST, String.class), }; } @@ -101,10 +103,13 @@ TupleListIterator data1 = (TupleListIterator) iterationData[0]; TupleListIterator data2 = (TupleListIterator) iterationData[1]; String condition = (String) 
iterationData[2]; - String readFirst = (String)iterationData[3]; + String readFirst = (String) iterationData[3]; boolean readRightInputFirst = INPUT_DATA2.equals(readFirst); join.storeRightTuples(readRightInputFirst); - join.setCondition(DerbyUtilities.getCondition(condition)); + if (condition != null) + { + join.setCondition(DerbyUtilities.getCondition(condition)); + } TupleMetadata metadata1 = (TupleMetadata)data1.getMetadataWrapper().getMetadata(); TupleMetadata metadata2 = Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/BatchTableScanBuilder.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/BatchTableScanBuilder.java 2012-11-14 14:09:12 UTC (rev 2153) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/BatchTableScanBuilder.java 2012-11-14 15:44:29 UTC (rev 2154) @@ -99,7 +99,7 @@ // Construct IfEmptyList activity GenericActivity ifEmptyList = - new GenericActivity("uk.org.ogsadai.astro.IfEmptyList"); + new GenericActivity("uk.org.ogsadai.IfEmptyList"); ifEmptyList.createInput("data"); ifEmptyList.createInput("content"); ifEmptyList.createOutput("outputEmpty"); Deleted: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/ProductJoin.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/ProductJoin.java 2012-11-14 14:09:12 UTC (rev 2153) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/ProductJoin.java 2012-11-14 15:44:29 UTC (rev 2154) @@ -1,32 +0,0 @@ -package uk.org.ogsadai.dqp.lqp.optimiser.join; - -import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; -import uk.org.ogsadai.resource.dataresource.dqp.RequestDQPFederation; - -public class ProductJoin implements JoinImplementation, JoinPlan -{ - - @Override - public double getCost() - { - // TODO Auto-generated method 
stub - return 0; - } - - @Override - public void apply() - { - // TODO Auto-generated method stub - - } - - @Override - public JoinPlan process( - RequestDQPFederation requestFederation, - InnerThetaJoinOperator joinOperator) - { - // TODO Auto-generated method stub - return null; - } - -} Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/presentation/common/XMLDQPResourceConfiguration.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/presentation/common/XMLDQPResourceConfiguration.java 2012-11-14 14:09:12 UTC (rev 2153) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/presentation/common/XMLDQPResourceConfiguration.java 2012-11-14 15:44:29 UTC (rev 2154) @@ -96,7 +96,9 @@ * path of the configuration file which can be either an absolute * path or relative to the OGSA-DAI config path. */ - public XMLDQPResourceConfiguration(String configPath, EvaluationNodeFactory factory) + public XMLDQPResourceConfiguration( + String configPath, + EvaluationNodeFactory factory) { // check whether path is absolute or relative if(!(new File(configPath)).isAbsolute()) @@ -123,8 +125,10 @@ * factory of evaluation nodes * @throws DQPResourceConfigurationException */ - protected void configure(InputStream inputStream, - EvaluationNodeFactory factory) throws DQPResourceConfigurationException + protected void configure( + InputStream inputStream, + EvaluationNodeFactory factory) + throws DQPResourceConfigurationException { mDataNodes = new HashSet<DataNode>(); mEvalNodes = new HashSet<EvaluationNode>(); @@ -132,16 +136,18 @@ DQPResourceConfigType config; try { - JAXBContext jc = JAXBContext - .newInstance("uk.org.ogsadai.dqp.bindings.xmlresconf"); + JAXBContext jc = + JAXBContext.newInstance( + "uk.org.ogsadai.dqp.bindings.xmlresconf"); Unmarshaller u = jc.createUnmarshaller(); Schema mySchema; - SchemaFactory sf = SchemaFactory - .newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); + 
SchemaFactory sf = SchemaFactory.newInstance( + XMLConstants.W3C_XML_SCHEMA_NS_URI); - URL schemaURL = Thread.currentThread().getContextClassLoader() - .getResource("schema/DQPResourceConfig.xsd"); + URL schemaURL = + Thread.currentThread().getContextClassLoader().getResource( + "schema/DQPResourceConfig.xsd"); if(schemaURL != null) { @@ -159,8 +165,8 @@ } } - config = ((JAXBElement<DQPResourceConfigType>) u - .unmarshal(inputStream)).getValue(); + config = ((JAXBElement<DQPResourceConfigType>) + u.unmarshal(inputStream)).getValue(); } catch (SAXException e) { @@ -181,17 +187,17 @@ EvaluationNode evalNode; try { - evalNode = factory.createEvaluationNode(r.getUrl(), drer, dsos, - dsis, r.isIsLocal()); + evalNode = factory.createEvaluationNode( + r.getUrl(), drer, dsos, dsis, r.isIsLocal()); } catch (MalformedURLException e) { throw new DQPResourceConfigurationException(e); } - DataNode dataNode = r.getAlias() == null ? new SimpleDataNode(r - .getResourceID(), evalNode) : new SimpleDataNode(r - .getResourceID(), r.getAlias(), evalNode); + DataNode dataNode = r.getAlias() == null ? + new SimpleDataNode(r.getResourceID(), evalNode) : + new SimpleDataNode(r.getResourceID(), r.getAlias(), evalNode); if (r.isIsLocal()) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 14:09:22
|
Revision: 2153 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2153&view=rev Author: amykrause Date: 2012-11-14 14:09:12 +0000 (Wed, 14 Nov 2012) Log Message: ----------- Added resource ID. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/ProductSelectBuilder.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/ProductSelectBuilder.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/ProductSelectBuilder.java 2012-11-14 14:07:33 UTC (rev 2152) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/ProductSelectBuilder.java 2012-11-14 14:09:12 UTC (rev 2153) @@ -40,10 +40,20 @@ /** Logger. */ private static final DAILogger LOG = DAILogger.getLogger(ProductSelectBuilder.class); + + private String mResourceID; /** - * {@inheritDoc} + * Sets the resource ID of the data resource for storing temporary tables. + * + * @param resourceID + * data resource ID */ + public void setResourceID(String resourceID) + { + mResourceID = resourceID; + } + public SingleActivityOutput build( Operator op, List<SingleActivityOutput> outputs, @@ -58,6 +68,7 @@ GenericActivity product = new GenericActivity("uk.org.ogsadai.TupleProduct"); + if (mResourceID != null) product.setResourceID(mResourceID); product.createInput("data1"); product.createInput("data2"); product.createOutput("result"); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 14:07:44
|
Revision: 2152 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2152&view=rev Author: amykrause Date: 2012-11-14 14:07:33 +0000 (Wed, 14 Nov 2012) Log Message: ----------- New join implementation. Added Paths: ----------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/DerbyStreamingJoinBuilder.java Added: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/DerbyStreamingJoinBuilder.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/DerbyStreamingJoinBuilder.java (rev 0) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/execute/workflow/DerbyStreamingJoinBuilder.java 2012-11-14 14:07:33 UTC (rev 2152) @@ -0,0 +1,98 @@ +// Copyright (c) The University of Edinburgh, 2012. +// +// LICENCE-START +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// LICENCE-END + +package uk.org.ogsadai.dqp.execute.workflow; + +import java.util.List; + +import uk.org.ogsadai.client.toolkit.SingleActivityOutput; +import uk.org.ogsadai.client.toolkit.activities.generic.GenericActivity; +import uk.org.ogsadai.common.msgs.DAILogger; +import uk.org.ogsadai.dqp.execute.ActivityConstructionException; +import uk.org.ogsadai.dqp.lqp.Operator; +import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; + +/** + * Builds activities for operator INNER THETA JOIN. + * + * @author The OGSA-DAI Project Team. 
+ */ +public class DerbyStreamingJoinBuilder implements ActivityPipelineBuilder +{ + /** Copyright notice. */ + private static final String COPYRIGHT_NOTICE = + "Copyright (c) The University of Edinburgh, 2012"; + + /** Logger. */ + private static final DAILogger LOG = + DAILogger.getLogger(DerbyStreamingJoinBuilder.class); + + private String mResourceID; + + /** + * Sets the resource ID of the data resource for storing temporary tables. + * + * @param resourceID + * data resource ID + */ + public void setResourceID(String resourceID) + { + mResourceID = resourceID; + } + + public SingleActivityOutput build( + Operator op, + List<SingleActivityOutput> outputs, + PipelineWorkflowBuilder builder) + throws ActivityConstructionException + { + LOG.debug("In DerbyStreamingJoinBuilder"); + + InnerThetaJoinOperator operator = (InnerThetaJoinOperator)op; + SingleActivityOutput outputLeft = outputs.get(0); + SingleActivityOutput outputRight = outputs.get(1); + + GenericActivity join = + new GenericActivity("uk.org.ogsadai.TupleJoin"); + if (mResourceID != null) join.setResourceID(mResourceID); + join.createInput("data1"); + join.createInput("data2"); + join.createInput("condition"); + join.createOutput("result"); + join.connectInput("data1", outputLeft); + join.connectInput("data2", outputRight); + + String readFirst = (String)operator.getAnnotation("readFirst"); + if (readFirst == null || readFirst.equals("left")) + { + readFirst = "data1"; + } + else + { + readFirst = "data2"; + } + join.createInput("readFirst"); + join.addInput("readFirst", readFirst); + + String sql = operator.getPredicate().toString(); + join.addInput("condition", sql); + + builder.add(join); + + return join.getOutput("result"); + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-14 13:45:02
|
Revision: 2151 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2151&view=rev Author: amykrause Date: 2012-11-14 13:44:51 +0000 (Wed, 14 Nov 2012) Log Message: ----------- New join implementation. Added Paths: ----------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoin.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoinPlan.java Added: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoin.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoin.java (rev 0) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoin.java 2012-11-14 13:44:51 UTC (rev 2151) @@ -0,0 +1,52 @@ +package uk.org.ogsadai.dqp.lqp.optimiser.join; + +import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.lqp.Annotation; +import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; +import uk.org.ogsadai.expression.Expression; +import uk.org.ogsadai.resource.dataresource.dqp.RequestDQPFederation; + +public class DerbyStreamingJoin implements JoinImplementation +{ + + @Override + public JoinPlan process( + RequestDQPFederation requestFederation, + InnerThetaJoinOperator joinOperator) + { + DerbyStreamingJoinPlan plan = new DerbyStreamingJoinPlan(joinOperator); + + DataNode dataNode = Annotation.getDataNodeAnnotation(joinOperator); + Expression joinCondition = joinOperator.getPredicate().getExpression(); + if (!dataNode.supportsExpression(joinCondition)) + { + return null; + } + + double leftCard = + Annotation.getCardinalityAnnotation(joinOperator.getChild(0)); + double rightCard = + Annotation.getCardinalityAnnotation(joinOperator.getChild(1)); + Cost cost = new Cost(); + double streamedCard, storedCard; + if (leftCard > rightCard) + { + streamedCard = leftCard; + storedCard = rightCard; + 
plan.setReadFirst("data2"); + } + else + { + streamedCard = rightCard; + storedCard = leftCard; + plan.setReadFirst("data1"); + } + cost.setReads(streamedCard + storedCard); + cost.setQueries(streamedCard); + cost.setMaterialise(storedCard); + + plan.setCost(cost.getCost()); + return plan; + } + +} Added: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoinPlan.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoinPlan.java (rev 0) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/DerbyStreamingJoinPlan.java 2012-11-14 13:44:51 UTC (rev 2151) @@ -0,0 +1,43 @@ +package uk.org.ogsadai.dqp.lqp.optimiser.join; + +import uk.org.ogsadai.dqp.lqp.Annotation; +import uk.org.ogsadai.dqp.lqp.exceptions.LQPException; +import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; + +public class DerbyStreamingJoinPlan implements JoinPlan +{ + + private InnerThetaJoinOperator mJoinOperator; + private double mCost; + private String mReadFirst; + + public DerbyStreamingJoinPlan(InnerThetaJoinOperator joinOperator) + { + mJoinOperator = joinOperator; + } + + public void setCost(double cost) + { + mCost = cost; + } + + public void setReadFirst(String readFirst) + { + mReadFirst = readFirst; + } + + @Override + public double getCost() + { + return mCost; + } + + @Override + public void apply() throws LQPException + { + Annotation.addImplementationAnnotation( + mJoinOperator, "DERBY_STREAMING_JOIN"); + Annotation.addReadFirstAnnotation(mJoinOperator, mReadFirst); + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-13 16:51:51
|
Revision: 2150 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2150&view=rev Author: amykrause Date: 2012-11-13 16:51:40 +0000 (Tue, 13 Nov 2012) Log Message: ----------- More bug fixes. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TableScanImplosionOptimiser.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/expression/arithmetic/visitors/AttrRenameToPhysicalAttrArithmeticExprVisitor.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TableScanImplosionOptimiser.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TableScanImplosionOptimiser.java 2012-11-13 15:29:03 UTC (rev 2149) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TableScanImplosionOptimiser.java 2012-11-13 16:51:40 UTC (rev 2150) @@ -27,6 +27,7 @@ import uk.org.ogsadai.dqp.lqp.OperatorID; import uk.org.ogsadai.dqp.lqp.Predicate; import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityEstimator; +import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityStatistics; import uk.org.ogsadai.dqp.lqp.exceptions.LQPException; import uk.org.ogsadai.dqp.lqp.operators.AbstractJoinOperator; import uk.org.ogsadai.dqp.lqp.operators.BinaryOperator; @@ -249,13 +250,18 @@ parentOp.disconnect(); tableScanOp.getParent().update(); - Annotation.addCardinalityAnnotation( - tableScanOp, - Annotation.getCardinalityAnnotation(parentOp)); - Annotation.addCardinalityStatisticsAnnotation( - tableScanOp, - Annotation.getCardinalityStatisticsAnnotation(parentOp)); - + Double cardinality = Annotation.getCardinalityAnnotation(parentOp); + if (cardinality != null) + { + Annotation.addCardinalityAnnotation(tableScanOp, cardinality); + } + CardinalityStatistics cardStats = + Annotation.getCardinalityStatisticsAnnotation(parentOp); + if (cardStats != null) + { + Annotation.addCardinalityStatisticsAnnotation( + 
tableScanOp, cardStats); + } implode(tableScanOp); } Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/expression/arithmetic/visitors/AttrRenameToPhysicalAttrArithmeticExprVisitor.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/expression/arithmetic/visitors/AttrRenameToPhysicalAttrArithmeticExprVisitor.java 2012-11-13 15:29:03 UTC (rev 2149) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/expression/arithmetic/visitors/AttrRenameToPhysicalAttrArithmeticExprVisitor.java 2012-11-13 16:51:40 UTC (rev 2150) @@ -72,10 +72,13 @@ { try { - String originalTableName = - mDataDictionary.getOriginalTableName( - tableColumn.getSource(), mDataNode); - tableColumn.rename(tableColumn.getName(), originalTableName); + if (tableColumn.getSource() != null) + { + String originalTableName = + mDataDictionary.getOriginalTableName( + tableColumn.getSource(), mDataNode); + tableColumn.rename(tableColumn.getName(), originalTableName); + } } catch (TableNotFoundException e) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-13 15:29:15
|
Revision: 2149 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2149&view=rev Author: amykrause Date: 2012-11-13 15:29:03 +0000 (Tue, 13 Nov 2012) Log Message: ----------- Tidying up. Modified Paths: -------------- ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/activity/dqp/CreateDQPResourceTest.java ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java Modified: ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/activity/dqp/CreateDQPResourceTest.java =================================================================== --- ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/activity/dqp/CreateDQPResourceTest.java 2012-11-13 14:27:43 UTC (rev 2148) +++ ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/activity/dqp/CreateDQPResourceTest.java 2012-11-13 15:29:03 UTC (rev 2149) @@ -18,10 +18,8 @@ import java.net.URL; import java.sql.ResultSet; -import java.sql.SQLException; import junit.framework.TestCase; - import uk.org.ogsadai.client.toolkit.DataRequestExecutionResource; import uk.org.ogsadai.client.toolkit.DataResource; import uk.org.ogsadai.client.toolkit.PipelineWorkflow; @@ -32,21 +30,11 @@ import uk.org.ogsadai.client.toolkit.activities.dqp.CreateDQPResource; import uk.org.ogsadai.client.toolkit.activities.sql.SQLQuery; import uk.org.ogsadai.client.toolkit.activities.transform.TupleToWebRowSetCharArrays; -import uk.org.ogsadai.client.toolkit.exception.ClientException; -import uk.org.ogsadai.client.toolkit.exception.DataSourceUsageException; -import uk.org.ogsadai.client.toolkit.exception.DataStreamErrorException; import uk.org.ogsadai.client.toolkit.exception.RequestCompletedWithErrorException; -import uk.org.ogsadai.client.toolkit.exception.RequestErrorException; -import 
uk.org.ogsadai.client.toolkit.exception.RequestException; import uk.org.ogsadai.client.toolkit.messages.Message; -import uk.org.ogsadai.client.toolkit.exception.ResourceUnknownException; -import uk.org.ogsadai.client.toolkit.exception.UnexpectedDataValueException; import uk.org.ogsadai.exception.ErrorID; -import uk.org.ogsadai.converters.webrowset.WebRowSetResultSetParseException; import uk.org.ogsadai.resource.ResourceID; -import uk.org.ogsadai.resource.ResourceType; import uk.org.ogsadai.resource.request.RequestExecutionStatus; -import uk.org.ogsadai.test.server.Utility; import uk.org.ogsadai.test.server.ServerTestProperties; import uk.org.ogsadai.test.server.TestServerProxyFactory; import uk.org.ogsadai.test.server.dqp.DQPServerTestConstants; @@ -206,8 +194,8 @@ "<DQPResourceConfig>" + "<dataResources>" + " <resource url=\"" + serverURL + "\"" + - " dsis=\"DataSinkService\"" + - " dsos=\"DataSourceService\"" + + " dsis=\"dataSinks\"" + + " dsos=\"dataSources\"" + " drerID=\"" + drerID + "\"" + " resourceID=\"" + jdbcID + "\"" + " isLocal=\"true\"/>" + Modified: ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java =================================================================== --- ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java 2012-11-13 14:27:43 UTC (rev 2148) +++ ogsa-dai/trunk/server-tests/presentation/extensions/dqp/src/test/ext/java/uk/org/ogsadai/test/server/dqp/DQPServerTest.java 2012-11-13 15:29:03 UTC (rev 2149) @@ -1095,20 +1095,20 @@ * @throws Exception * If any problems arise. */ - public void testUnionWithConstantAndIncompatibleValue() throws Exception - { - String sqlDQP = "SELECT room FROM Resource2_class " - + "UNION ALL SELECT 0 FROM Resource3_class "; - try - { - execute(sqlDQP); - } - catch (RequestExecutionException e) - { - // TODO validate causal exception chain. 
- System.out.println(e.getRequestResource().getRequestExecutionStatus()); - } - } + public void testUnionWithConstantAndIncompatibleValue() throws Exception + { + String sqlDQP = "SELECT room FROM Resource2_class " + + "UNION ALL SELECT 0 FROM Resource3_class "; + try + { + execute(sqlDQP); + } + catch (RequestExecutionException e) + { + // TODO validate causal exception chain. + System.out.println(e.getRequestResource().getRequestExecutionStatus()); + } + } /** * Tests using functions below an IS NULL node of an expression tree. @@ -1312,19 +1312,19 @@ */ public void testBindingPatternSimpleWithNull() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM aircraft a, certified_null b " - + "WHERE a.aid = b.aid"; - - String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_certified_null b " - + "WHERE @a.aid = b.aid"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM aircraft a, certified_null b " + + "WHERE a.aid = b.aid"; + + String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_certified_null b " + + "WHERE @a.aid = b.aid"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } /** @@ -1334,19 +1334,19 @@ */ public void testBindingPatternWithExpr() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM aircraft a, certified b " - + "WHERE a.aid = b.aid + 2"; - - String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_certified b " - + "WHERE @a.aid = b.aid + 2"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM aircraft a, certified b " + + "WHERE a.aid = b.aid + 2"; + + String 
sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_certified b " + + "WHERE @a.aid = b.aid + 2"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } /** @@ -1356,19 +1356,19 @@ */ public void testBindingPatternWithFunc() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM flights f, flights g " - + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY)"; - - String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " - + "WHERE @f.arrives = AddDays(g.departs, 1)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM flights f, flights g " + + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY)"; + + String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " + + "WHERE @f.arrives = AddDays(g.departs, 1)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } /** @@ -1378,21 +1378,21 @@ */ public void testBindingPatternWithFuncAndConst() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM flights f, flights g " - + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY) " - + "AND f.arrives = '2005-04-13'"; - - String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " - + "WHERE @f.arrives = AddDays(g.departs, 1) " - + "AND @f.arrives = '2005-04-13'"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM flights f, flights g " + + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY) " + + "AND f.arrives = '2005-04-13'"; + + String sqlDQP = "SELECT * FROM Resource2_flights 
f, MySQLResource_flights g " + + "WHERE @f.arrives = AddDays(g.departs, 1) " + + "AND @f.arrives = '2005-04-13'"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } /** @@ -1402,21 +1402,21 @@ */ public void testBindingPatternWithTwoBindings() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM flights f, flights g " - + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY) " - + "AND f.arrives = DATE_SUB(g.arrives, INTERVAL 1 DAY)"; - - String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " - + "WHERE @f.arrives = AddDays(g.departs, 1) " - + "AND @f.arrives = SubDays(g.arrives, 1)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM flights f, flights g " + + "WHERE f.arrives = DATE_ADD(g.departs, INTERVAL 1 DAY) " + + "AND f.arrives = DATE_SUB(g.arrives, INTERVAL 1 DAY)"; + + String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " + + "WHERE @f.arrives = AddDays(g.departs, 1) " + + "AND @f.arrives = SubDays(g.arrives, 1)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } /** @@ -1425,24 +1425,24 @@ * @throws Exception */ public void testBindingPatternWithTwoBindingsDifferentAttributes() - throws Exception + throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM flights f, flights g " - + "WHERE f.arrives = g.arrives " - + "AND f.departs = g.arrives - 1"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM flights f, flights g " + + "WHERE f.arrives = g.arrives " + + "AND f.departs = g.arrives - 1"; - String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " - + "WHERE @f.arrives = g.arrives " - 
+ "AND @f.departs = SubDays(g.arrives, 1)"; + String sqlDQP = "SELECT * FROM Resource2_flights f, MySQLResource_flights g " + + "WHERE @f.arrives = g.arrives " + + "AND @f.departs = SubDays(g.arrives, 1)"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } /** * Tests a binding pattern where value providing table local and empty. @@ -1451,20 +1451,20 @@ */ public void testBindingPatternWithEmptyValuesLocalRes() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM aircraft a, empty b " - + "WHERE a.aid = b.id"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM aircraft a, empty b " + + "WHERE a.aid = b.id"; - String sqlDQP = "SELECT * FROM MySQLResource_aircraft a, MySQLResource_empty b " - + "WHERE @a.aid = b.id"; + String sqlDQP = "SELECT * FROM MySQLResource_aircraft a, MySQLResource_empty b " + + "WHERE @a.aid = b.id"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - setCompare(actual, expected); - } + setCompare(actual, expected); } + } /** * Tests a binding pattern where value providing table remote and empty. 
@@ -1473,20 +1473,20 @@ */ public void testBindingPatternWithEmptyValues() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM aircraft a, empty b " - + "WHERE a.aid = b.id"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM aircraft a, empty b " + + "WHERE a.aid = b.id"; - String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_empty b " - + "WHERE @a.aid = b.id"; + String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_empty b " + + "WHERE @a.aid = b.id"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - setCompare(actual, expected); - } + setCompare(actual, expected); } + } /** * Test a binding pattern where value receiving table is local and empty. @@ -1494,22 +1494,22 @@ * @throws Exception */ public void testBindingPatternWithEmptyBoundTableLocalRes() - throws Exception + throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM aircraft a, empty b " - + "WHERE a.aid = b.id"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM aircraft a, empty b " + + "WHERE a.aid = b.id"; - String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_empty b " - + "WHERE @b.id = a.aid"; + String sqlDQP = "SELECT * FROM Resource2_aircraft a, MySQLResource_empty b " + + "WHERE @b.id = a.aid"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - setCompare(actual, expected); - } + setCompare(actual, expected); } + } /** * Test a binding pattern where a local resource has NULL values. 
@@ -1518,21 +1518,21 @@ */ public void testBindingPatternWithNullsLocalRes() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT a.tinyintT, b.intT FROM mysql_types a, mysql_types b " - + "WHERE a.tinyintT = b.intT"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT a.tinyintT, b.intT FROM mysql_types a, mysql_types b " + + "WHERE a.tinyintT = b.intT"; - String sqlDQP = "SELECT a.tinyintT, b.intT " - + "FROM MySQLResource_mysql_types a, MySQLResource_mysql_types b " - + "WHERE @a.tinyintT = b.intT"; + String sqlDQP = "SELECT a.tinyintT, b.intT " + + "FROM MySQLResource_mysql_types a, MySQLResource_mysql_types b " + + "WHERE @a.tinyintT = b.intT"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - setCompare(actual, expected); - } + setCompare(actual, expected); } + } /** * Test a binding pattern where a local resource has NULL values. 
@@ -1541,20 +1541,20 @@ */ public void testBindingPatternWithNulls() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT a.tinyintT, b.intT FROM mysql_types a, mysql_types b " - + "WHERE a.tinyintT = b.intT"; - - String sqlDQP = "SELECT a.tinyintT, b.intT " - + "FROM Resource2_mysql_types a, MySQLResource_mysql_types b " - + "WHERE @a.tinyintT = b.intT"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - setCompare(actual, expected); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT a.tinyintT, b.intT FROM mysql_types a, mysql_types b " + + "WHERE a.tinyintT = b.intT"; + + String sqlDQP = "SELECT a.tinyintT, b.intT " + + "FROM Resource2_mysql_types a, MySQLResource_mysql_types b " + + "WHERE @a.tinyintT = b.intT"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + setCompare(actual, expected); + } } /** @@ -1563,58 +1563,58 @@ */ public void testIn() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types WHERE tinyintT " - + "IN (SELECT smallintT FROM mysql_types WHERE smallintT > 80)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types WHERE tinyintT " - + "IN (SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT > 80)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - listCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types WHERE tinyintT " + + "IN (SELECT smallintT FROM mysql_types WHERE smallintT > 80)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types WHERE tinyintT " + + "IN (SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT > 80)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } } public void testNotInLocal() throws Exception { - for (int i = 0; 
i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } public void testNotInRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT NOT IN (" - + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM mysql_types WHERE smallintT < 90)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT NOT IN (" + + "SELECT smallintT FROM MySQLResource_mysql_types WHERE smallintT < 90)"; - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet expected = executeQueryDirect(sqlDirect); ResultSet actual = execute(sqlDQP); - setCompare(expected, actual); - } + setCompare(expected, actual); } + } public void testCorrInLocal() throws Exception { @@ -1637,41 +1637,41 @@ public void testCorrInRemote() throws 
Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT IN (SELECT smallintT FROM mysql_types " - + "WHERE t.tinyintT = smallintT and smallintT > 80)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types t " - + "WHERE tinyintT IN (SELECT smallintT FROM MySQLResource_mysql_types s " - + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT IN (SELECT smallintT FROM mysql_types " + + "WHERE t.tinyintT = smallintT and smallintT > 80)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types t " + + "WHERE tinyintT IN (SELECT smallintT FROM MySQLResource_mysql_types s " + + "WHERE t.tinyintT = s.smallintT and s.smallintT > 80)"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } public void testCorrNotInLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = smallintT AND smallintT < 90)"; - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " - + "WHERE tinyintT NOT IN (SELECT smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.smallintT AND s.smallintT < 90)"; + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT NOT IN (SELECT smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.smallintT AND s.smallintT < 90)"; - ResultSet actual = execute(sqlDQP); - ResultSet expected 
= executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } public void testCorrNotInRemote() throws Exception @@ -1714,131 +1714,131 @@ public void testAnyRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT > ANY (SELECT smallintT " - + "FROM mysql_types WHERE smallintT > 80)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM mysql_types WHERE smallintT > 80)"; - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT > ANY (SELECT smallintT " - + "FROM MySQLResource_mysql_types WHERE smallintT > 80)"; + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT > ANY (SELECT smallintT " + + "FROM MySQLResource_mysql_types WHERE smallintT > 80)"; - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } public void testAllLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT tinyintT, smallintT FROM mysql_types " - + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT tinyintT, smallintT FROM mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; - String sqlDQP = "SELECT tinyintT, smallintT FROM MySQLResource_mysql_types " - + "WHERE tinyintT < ALL (SELECT s.smallintT FROM MySQLResource_mysql_types s)"; + String sqlDQP = "SELECT tinyintT, smallintT FROM MySQLResource_mysql_types " + + "WHERE tinyintT < ALL (SELECT s.smallintT FROM MySQLResource_mysql_types s)"; - ResultSet actual = 
execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); - } + listCompare(expected, actual); } + } public void testAllRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types " - + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; - String sqlDQP = "SELECT * FROM Resource2_mysql_types " - + "WHERE tinyintT < ALL (SELECT smallintT FROM MySQLResource_mysql_types)"; + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM mysql_types)"; + String sqlDQP = "SELECT * FROM Resource2_mysql_types " + + "WHERE tinyintT < ALL (SELECT smallintT FROM MySQLResource_mysql_types)"; - ResultSet expected = executeQueryDirect(sqlDirect); + ResultSet expected = executeQueryDirect(sqlDirect); ResultSet actual = execute(sqlDQP); - setCompare(expected, actual); - } + setCompare(expected, actual); } + } public void testCorrAnyLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT < ANY (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " - + "WHERE tinyintT < ANY (SELECT s.smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - setCompare(expected, actual); - } - } - - public void testCorrAnyRemote() throws Exception - { for (int i = 0; i < 1; i++) { String sqlDirect = "SELECT * FROM mysql_types t " + "WHERE tinyintT < ANY (SELECT smallintT " + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; - String sqlDQP = "SELECT * FROM Resource2_mysql_types t " - + "WHERE tinyintT < ANY (SELECT smallintT " - 
+ "FROM MySQLResource_mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT < ANY (SELECT s.smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT)"; ResultSet actual = execute(sqlDQP); ResultSet expected = executeQueryDirect(sqlDirect); - listCompare(expected, actual); + setCompare(expected, actual); } } + + public void testCorrAnyRemote() throws Exception + { + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT < ANY (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; + String sqlDQP = "SELECT * FROM Resource2_mysql_types t " + + "WHERE tinyintT < ANY (SELECT smallintT " + + "FROM MySQLResource_mysql_types WHERE t.tinyintT = tinyintT AND tinyintT > 50)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + listCompare(expected, actual); + } + } + public void testCorrAllLocal() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT < ALL (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT < 90)"; - - String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " - + "WHERE tinyintT < ALL (SELECT smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT AND s.tinyintT < 90)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - setCompare(expected, actual); - } + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT < ALL (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT < 90)"; + + String sqlDQP = "SELECT * FROM MySQLResource_mysql_types t " + + "WHERE tinyintT < ALL (SELECT smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT AND s.tinyintT < 
90)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + setCompare(expected, actual); + } } public void testCorrAllRemote() throws Exception { - for (int i = 0; i < 1; i++) - { - String sqlDirect = "SELECT * FROM mysql_types t " - + "WHERE tinyintT < ALL (SELECT smallintT " - + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT < 90)"; - - String sqlDQP = "SELECT * FROM Resource2_mysql_types t " - + "WHERE tinyintT < ALL (SELECT smallintT " - + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT AND s.tinyintT < 90)"; - - ResultSet actual = execute(sqlDQP); - ResultSet expected = executeQueryDirect(sqlDirect); - - setCompare(expected, actual); + for (int i = 0; i < 1; i++) + { + String sqlDirect = "SELECT * FROM mysql_types t " + + "WHERE tinyintT < ALL (SELECT smallintT " + + "FROM mysql_types WHERE t.tinyintT = tinyintT AND tinyintT < 90)"; + + String sqlDQP = "SELECT * FROM Resource2_mysql_types t " + + "WHERE tinyintT < ALL (SELECT smallintT " + + "FROM MySQLResource_mysql_types s WHERE t.tinyintT = s.tinyintT AND s.tinyintT < 90)"; + + ResultSet actual = execute(sqlDQP); + ResultSet expected = executeQueryDirect(sqlDirect); + + setCompare(expected, actual); + } } - } /** * Tests mod function with valid and invalid inputs. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-13 14:27:53
|
Revision: 2148 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2148&view=rev Author: amykrause Date: 2012-11-13 14:27:43 +0000 (Tue, 13 Nov 2012) Log Message: ----------- Reflected change in tested class. Modified Paths: -------------- ogsa-dai/trunk/server-tests/presentation/jersey/src/test/ext/java/uk/org/ogsadai/jersey/client/test/JerseyClientTest.java Modified: ogsa-dai/trunk/server-tests/presentation/jersey/src/test/ext/java/uk/org/ogsadai/jersey/client/test/JerseyClientTest.java =================================================================== --- ogsa-dai/trunk/server-tests/presentation/jersey/src/test/ext/java/uk/org/ogsadai/jersey/client/test/JerseyClientTest.java 2012-11-13 14:20:51 UTC (rev 2147) +++ ogsa-dai/trunk/server-tests/presentation/jersey/src/test/ext/java/uk/org/ogsadai/jersey/client/test/JerseyClientTest.java 2012-11-13 14:27:43 UTC (rev 2148) @@ -6,7 +6,6 @@ import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; -import java.util.Iterator; import java.util.List; import junit.framework.TestCase; @@ -687,7 +686,7 @@ ); DataValueIterator iterator = new SimpleDataValueIterator(values.iterator()); - dataSink.writeStream(iterator, 0); + dataSink.writeStream(iterator); dataSink.close(); request.pollUntilRequestCompleted(50); @@ -710,95 +709,6 @@ } - public void testWriteStreamWithKeepAliveToDataSink() throws Exception - { - Server server = getServer(); - DataRequestExecutionResource drer = getDRER(server); - - JerseyDataSink dataSink = - (JerseyDataSink)ResourceFactory.createDataSink(server, drer); - - ReadFromDataSink read = new ReadFromDataSink(); - read.setResourceID(dataSink.getResourceID()); - DeliverToRequestStatus deliver = new DeliverToRequestStatus(); - deliver.connectInput(read.getOutput()); - PipelineWorkflow workflow = new PipelineWorkflow(); - workflow.add(read); - workflow.add(deliver); - - try - { - RequestResource request = - drer.execute(workflow, RequestExecutionType.ASYNCHRONOUS); - final 
List<DataValue> values = Arrays.asList( - new StringData("Hello World!"), - new IntegerData(1), - new LongData(2), - new DoubleData(3.4), - new FloatData(1.2f), - new StringData("Bye World!") - ); - // Construct an iterator that pauses after every other value - Iterator<DataValue> valuesIterator = new Iterator<DataValue>() - { - int count = 0; - Iterator<DataValue> it = values.iterator(); - - @Override - public boolean hasNext() - { - return it.hasNext(); - } - - @Override - public DataValue next() - { - count++; - if (count % 2 == 1) - { - try - { - Thread.sleep(50); - } - catch (InterruptedException e) - { - Thread.currentThread().interrupt(); - } - } - return it.next(); - } - - @Override - public void remove() - { - it.remove(); - } - - }; - DataValueIterator iterator = - new SimpleDataValueIterator(valuesIterator); - dataSink.writeStream(iterator, 10); - dataSink.close(); - - request.pollUntilRequestCompleted(50); - request.getRequestStatus(); - - DataValueIterator dv = deliver.getDataValueIterator(); - for (DataValue value : values) - { - assertTrue(dv.hasNext()); - assertEquals(value, dv.next()); - } - assertFalse(dv.hasNext()); - - } - finally - { - dataSink.destroy(); - } - - } - public void testWriteToUnknownDataSink() throws Exception { Server server = getServer(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-13 14:21:02
|
Revision: 2147 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2147&view=rev Author: amykrause Date: 2012-11-13 14:20:51 +0000 (Tue, 13 Nov 2012) Log Message: ----------- Data sink client writes each block in a separate request. Modified Paths: -------------- ogsa-dai/trunk/presentation/jersey/client/src/main/java/uk/org/ogsadai/client/toolkit/presentation/jersey/JerseyDataSink.java ogsa-dai/trunk/presentation/jersey/server/src/main/java/uk/org/ogsadai/activity/delivery/DeliverToJerseyDataSinkActivity.java Modified: ogsa-dai/trunk/presentation/jersey/client/src/main/java/uk/org/ogsadai/client/toolkit/presentation/jersey/JerseyDataSink.java =================================================================== --- ogsa-dai/trunk/presentation/jersey/client/src/main/java/uk/org/ogsadai/client/toolkit/presentation/jersey/JerseyDataSink.java 2012-11-13 14:19:04 UTC (rev 2146) +++ ogsa-dai/trunk/presentation/jersey/client/src/main/java/uk/org/ogsadai/client/toolkit/presentation/jersey/JerseyDataSink.java 2012-11-13 14:20:51 UTC (rev 2147) @@ -18,15 +18,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; import java.util.Iterator; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; @@ -135,19 +127,10 @@ /** * This method streams a large data set from a data value iterator to the - * server. A keep-alive block is sent at a fixed time period to prevent the - * receiver from timing out. </p> - * <p> - * Note that the data source is not closed automatically at the end of the - * stream and this method can be called multiple time. The data source must - * be closed explicity by calling <code>close()</code>. + * server. 
Each block is sent in a separate request. * * @param data * data iterator - * @param keepAlive - * time period (in milliseconds) of the keep alive block - 0 or a - * negative value indicates that no keep alive blocks should be - * sent * @throws IOException * @throws DataStreamErrorException * @throws UnexpectedDataValueException @@ -157,7 +140,7 @@ * @throws ResourceUnknownException * @throws ClientException */ - public void writeStream(DataValueIterator data, long keepAlive) + public void writeStream(DataValueIterator data) throws IOException, DataStreamErrorException, UnexpectedDataValueException, @@ -167,102 +150,28 @@ ResourceUnknownException, ClientException { - final PipedOutputStream pipedOutput = new PipedOutputStream(); - final PipedInputStream pipedInput = new PipedInputStream(pipedOutput); - BinaryDataValueOutputStream output = - new BinaryDataValueOutputStream(pipedOutput); - - Callable<Void> putRequest = new Callable<Void>() - { - @Override - public Void call() - { - Client client = Client.create(); - WebResource dataSink = client.resource(mURL.toString()); - dataSink.type(MediaType.APPLICATION_OCTET_STREAM).post(pipedInput); - return null; - } - }; - Future<Void> future = - Executors.newSingleThreadExecutor().submit(putRequest); - - // write KEEP_ALIVE blocks at the specified period - Timer timer = null; - if (keepAlive > 0) - { - timer = new Timer(); - timer.schedule(new KeepAliveTimerTask(output), keepAlive, keepAlive); - } - try { - try + while (data.hasNext()) { - while (data.hasNext()) - { - DataValue next = data.next(); - output.write(next); - } - output.close(); - future.get(); + DataValue next = data.next(); + putValue(next); } - catch (DataStreamErrorException e) - { - output.closeWithError(); - closeDueToError(); - future.get(); - throw e; - } - catch (UnexpectedDataValueException e) - { - output.closeWithError(); - closeDueToError(); - future.get(); - throw e; - } - catch (DataSourceUsageException e) - { - output.closeWithError(); - 
closeDueToError(); - future.get(); - throw e; - } } - catch (InterruptedException e) + catch (DataStreamErrorException e) { - output.closeWithError(); closeDueToError(); - Thread.currentThread().interrupt(); - } - catch (ExecutionException e) + throw e; + } + catch (UnexpectedDataValueException e) { - if (e.getCause() instanceof ClientHandlerException) - { - throw new RemoteServerCommsException(mURL, e.getCause()); - } - if (e.getCause() instanceof UniformInterfaceException) - { - UniformInterfaceException exc = - (UniformInterfaceException)e.getCause(); - Status status = Status.fromStatusCode(exc.getResponse().getStatus()); - if (status == Status.NOT_FOUND) - { - throw new ResourceUnknownException(getResourceID()); - } - if (status == Status.INTERNAL_SERVER_ERROR) - { - throw new ServerException(ErrorID.SERVER_ERROR, e.getCause()); - } - } - throw new ServerException(ErrorID.SERVER_ERROR, e.getCause()); + closeDueToError(); + throw e; } - finally + catch (DataSourceUsageException e) { - future.cancel(true); - if (timer != null) - { - timer.cancel(); - } + closeDueToError(); + throw e; } } @@ -332,7 +241,7 @@ @Override public void putValues( - Iterator dataValueIterator, + Iterator<DataValue> dataValueIterator, DataStreamStatus streamStatus) throws ServerCommsException, ServerException, @@ -521,9 +430,12 @@ } @Override - public void putValues(Iterator dataValueIterator, long sequenceNumber) - throws ServerCommsException, ServerException, - ResourceUnknownException, ClientException + public void putValues( + Iterator<DataValue> dataValueIterator, long sequenceNumber) + throws ServerCommsException, + ServerException, + ResourceUnknownException, + ClientException { try { @@ -545,10 +457,14 @@ } @Override - public void putValues(Iterator dataValueIterator, - DataStreamStatus streamStatus, long sequenceNumber) - throws ServerCommsException, ServerException, - ResourceUnknownException, ClientException + public void putValues( + Iterator<DataValue> dataValueIterator, + 
DataStreamStatus streamStatus, + long sequenceNumber) + throws ServerCommsException, + ServerException, + ResourceUnknownException, + ClientException { try { @@ -591,34 +507,4 @@ return "JerseyDataSink[" + mURL + "]"; } - /** - * This timer task writes one keep alive block. - * - * @author The OGSA-DAI Project Team. - */ - private static class KeepAliveTimerTask extends TimerTask - { - /** Output stream to write to. */ - private BinaryDataValueOutputStream mDataOutput; - - public KeepAliveTimerTask(BinaryDataValueOutputStream output) - { - mDataOutput = output; - } - - @Override - public void run() - { - try - { - mDataOutput.keepAlive(); - } - catch (IOException e) - { - // ignore - } - } - } - - } Modified: ogsa-dai/trunk/presentation/jersey/server/src/main/java/uk/org/ogsadai/activity/delivery/DeliverToJerseyDataSinkActivity.java =================================================================== --- ogsa-dai/trunk/presentation/jersey/server/src/main/java/uk/org/ogsadai/activity/delivery/DeliverToJerseyDataSinkActivity.java 2012-11-13 14:19:04 UTC (rev 2146) +++ ogsa-dai/trunk/presentation/jersey/server/src/main/java/uk/org/ogsadai/activity/delivery/DeliverToJerseyDataSinkActivity.java 2012-11-13 14:20:51 UTC (rev 2147) @@ -223,7 +223,7 @@ { // write a stream with a keep alive block every 5 minutes LOG.debug("Now delivering data stream to data sink."); - dataSinkResource.writeStream(iterator, 5*60000); + dataSinkResource.writeStream(iterator); LOG.debug("Finished writing."); dataSinkResource.close(); LOG.debug("Closed data source."); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-13 14:19:15
|
Revision: 2146 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2146&view=rev Author: amykrause Date: 2012-11-13 14:19:04 +0000 (Tue, 13 Nov 2012) Log Message: ----------- Added generics. Modified Paths: -------------- ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/DataSinkResource.java ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/resource/BaseDataSinkResource.java Modified: ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/DataSinkResource.java =================================================================== --- ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/DataSinkResource.java 2012-11-12 12:29:42 UTC (rev 2145) +++ ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/DataSinkResource.java 2012-11-13 14:19:04 UTC (rev 2146) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2007. +// Copyright (c) The University of Edinburgh, 2007-2012. // // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -168,7 +168,7 @@ * if the data could not be written to the data sink due to the * state of the data sink. */ - void putValues(Iterator/*<DataValue>*/ dataValueIterator) + void putValues(Iterator<DataValue> dataValueIterator) throws ServerCommsException, ServerException, ResourceUnknownException, @@ -197,7 +197,7 @@ * state of the data sink. */ void putValues( - Iterator/*<DataValue>*/ dataValueIterator, + Iterator<DataValue> dataValueIterator, DataStreamStatus streamStatus) throws ServerCommsException, ServerException, @@ -373,9 +373,14 @@ * if the data could not be written to the data sink due to the * state of the data sink. 
*/ - void putValues(DataValue[] values, DataStreamStatus streamStatus, - long sequenceNumber) throws ServerCommsException, ServerException, - ResourceUnknownException, ClientException; + void putValues( + DataValue[] values, + DataStreamStatus streamStatus, + long sequenceNumber) + throws ServerCommsException, + ServerException, + ResourceUnknownException, + ClientException; /** * Writes multiple data values to the sink resource. The status of the @@ -396,9 +401,11 @@ * if the data could not be written to the data sink due to the * state of the data sink. */ - void putValues(Iterator/* <DataValue> */dataValueIterator, - long sequenceNumber) throws ServerCommsException, ServerException, - ResourceUnknownException, ClientException; + void putValues(Iterator<DataValue> dataValueIterator, long sequenceNumber) + throws ServerCommsException, + ServerException, + ResourceUnknownException, + ClientException; /** * Writes multiple data values to the sink resource and flags the status of @@ -425,8 +432,9 @@ * state of the data sink. */ void putValues( - Iterator/*<DataValue>*/ dataValueIterator, - DataStreamStatus streamStatus, long sequenceNumber) + Iterator<DataValue> dataValueIterator, + DataStreamStatus streamStatus, + long sequenceNumber) throws ServerCommsException, ServerException, ResourceUnknownException, Modified: ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/resource/BaseDataSinkResource.java =================================================================== --- ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/resource/BaseDataSinkResource.java 2012-11-12 12:29:42 UTC (rev 2145) +++ ogsa-dai/trunk/core/client/src/main/java/uk/org/ogsadai/client/toolkit/resource/BaseDataSinkResource.java 2012-11-13 14:19:04 UTC (rev 2146) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2007-2010. +// Copyright (c) The University of Edinburgh, 2007-2012. 
// // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -49,7 +49,7 @@ { /** Copyright notice */ private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2007-2010."; + "Copyright (c) The University of Edinburgh, 2007-2012."; /** Controller to data sink calls. */ protected DataSinkResourceCallController mCallController; @@ -78,8 +78,8 @@ { mCallController = controller; } - - // Interface implementation + + @Override public void putValue(final DataValue value) throws ServerCommsException, ServerException, @@ -89,7 +89,7 @@ putValue(value, DataStreamStatus.OPEN); } - // Interface implementation + @Override public void putValues(DataValue[] values) throws ServerCommsException, ServerException, @@ -99,8 +99,8 @@ putValues(values, DataStreamStatus.OPEN); } - // Interface implementation - public void putValues(Iterator/*<DataValue>*/ dataValueIterator) + @Override + public void putValues(Iterator<DataValue> dataValueIterator) throws ServerCommsException, ServerException, ResourceUnknownException, @@ -109,8 +109,7 @@ putValues(dataValueIterator, DataStreamStatus.OPEN); } - - // Interface implementation + @Override public DataSinkStatus getStatus() throws ServerCommsException, ServerException, @@ -158,9 +157,7 @@ return putValuesNBHelper(values, null, null, streamStatus, null); } - /** - * {@inheritDoc} - */ + @Override public int putValuesNB(DataValue[] values, int start, int length) throws ServerCommsException, ServerException, ResourceUnknownException, ClientException @@ -169,9 +166,7 @@ null); } - /** - * {@inheritDoc} - */ + @Override public int putValuesNB(DataValue[] values, int start, int length, long sequenceNumber) throws ServerCommsException, ServerException, @@ -181,9 +176,7 @@ sequenceNumber); } - /** - * {@inheritDoc} - */ + @Override public int putValuesNB(DataValue[] values, int start, int length, DataStreamStatus streamStatus) throws ServerCommsException, ServerException, @@ -192,9 
+185,7 @@ return putValuesNBHelper(values, start, length, streamStatus, null); } - /** - * {@inheritDoc} - */ + @Override public int putValuesNB(DataValue[] values, int start, int length, DataStreamStatus streamStatus, long sequenceNumber) throws ServerCommsException, ServerException, @@ -203,9 +194,7 @@ return putValuesNBHelper(values, start, length, streamStatus, sequenceNumber); } - /** - * {@inheritDoc} - */ + @Override public void putUntilCompleteNB(DataValue[] values, DataStreamStatus streamStatus) throws ServerCommsException, ServerException, This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-12 12:29:53
|
Revision: 2145 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2145&view=rev Author: amykrause Date: 2012-11-12 12:29:42 +0000 (Mon, 12 Nov 2012) Log Message: ----------- Fixing tests. Removed Paths: ------------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OrderedSweepJoinOptimiserTest.java Deleted: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OrderedSweepJoinOptimiserTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OrderedSweepJoinOptimiserTest.java 2012-11-12 12:24:10 UTC (rev 2144) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OrderedSweepJoinOptimiserTest.java 2012-11-12 12:29:42 UTC (rev 2145) @@ -1,93 +0,0 @@ -// Copyright (c) The University of Edinburgh, 2011. -// -// LICENCE-START -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// LICENCE-END - -package uk.org.ogsadai.dqp.lqp.optimiser.join; - -import junit.framework.TestCase; -import uk.org.ogsadai.dqp.lqp.CommonPredicate; -import uk.org.ogsadai.dqp.lqp.Predicate; -import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; -import uk.org.ogsadai.dqp.lqp.udf.repository.SimpleFunctionRepository; -import uk.org.ogsadai.dqp.lqp.udf.scalar.adql.Box; -import uk.org.ogsadai.dqp.lqp.udf.scalar.adql.Contains; -import uk.org.ogsadai.dqp.lqp.udf.scalar.adql.Point; - -public class OrderedSweepJoinOptimiserTest extends TestCase -{ - /** Copyright notice. */ - private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2011"; - - /** - * Constructor. - * - * @param name - */ - public OrderedSweepJoinOptimiserTest(String name) - { - super(name); - } - - /** - * Tests finding the sweep predicate when the predicate is the predicate - * of the join operation. - */ - public void testFindSweepPredicateWhenPredicateInJoinOperator() - { - SimpleFunctionRepository functionRepository = - new SimpleFunctionRepository(); - - functionRepository.register(Contains.class); - functionRepository.register(Point.class); - functionRepository.register(Box.class); - - CommonPredicate predicate = new CommonPredicate( - "Contains(" + - " Point('',x,y), " + - " Box('',x1-0.1, y2-0.1, 0.2, 0.2)) = TRUE", - functionRepository); - - InnerThetaJoinOperator joinOperator = - new InnerThetaJoinOperator(predicate); - - Predicate resultPredicate = - OrderedSweepJoinOptimiser.findSweepPredicate(joinOperator); - - assertEquals( - "result predicate must be the given predicate", - predicate, resultPredicate); - } - - public void testFindSweepPredicateWhenPredicateInSelectOperator() - { - fail("test not yet implemented"); - } - - /** - * Tests finding the sweep predicate when there is no sweep predicate. 
- */ - public void testFindSweepPredicateWhenNoSweepPredicate() - { - CommonPredicate predicate = new CommonPredicate("x=y", null); - InnerThetaJoinOperator joinOperator = - new InnerThetaJoinOperator(predicate); - - Predicate resultPredicate = - OrderedSweepJoinOptimiser.findSweepPredicate(joinOperator); - - assertNull("result predicate must be null", resultPredicate); - } -} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-12 12:24:17
|
Revision: 2144 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2144&view=rev Author: amykrause Date: 2012-11-12 12:24:10 +0000 (Mon, 12 Nov 2012) Log Message: ----------- Fixing tests. Modified Paths: -------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/common/simple/SimpleLogicalSchema.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/select/SelectPushDownOptimiserTest.java Modified: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/common/simple/SimpleLogicalSchema.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/common/simple/SimpleLogicalSchema.java 2012-11-12 11:57:27 UTC (rev 2143) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/common/simple/SimpleLogicalSchema.java 2012-11-12 12:24:10 UTC (rev 2144) @@ -1,5 +1,6 @@ package uk.org.ogsadai.dqp.common.simple; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -25,6 +26,17 @@ } } + public SimpleLogicalSchema(String name, Attribute... attributes) + { + mName = name; + mAttributes = Arrays.asList(attributes); + mAttributeNames = new HashMap<String, Attribute>(); + for (Attribute attribute : attributes) + { + mAttributeNames.put(attribute.getName(), attribute); + } + } + public SimpleLogicalSchema(String name, LogicalSchema schema) { this(name, schema.getAttributes()); Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/select/SelectPushDownOptimiserTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/select/SelectPushDownOptimiserTest.java 2012-11-12 11:57:27 UTC (rev 2143) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/select/SelectPushDownOptimiserTest.java 2012-11-12 12:24:10 UTC (rev 2144) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2008. 
+// Copyright (c) The University of Edinburgh, 2008-2012. // // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,16 +20,15 @@ import java.util.List; import junit.framework.TestCase; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaData; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaData; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.DataDictionary; import uk.org.ogsadai.dqp.common.DataNode; import uk.org.ogsadai.dqp.common.EvaluationNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; import uk.org.ogsadai.dqp.lqp.AttributeImpl; import uk.org.ogsadai.dqp.lqp.BindingPredicate; import uk.org.ogsadai.dqp.lqp.Branch; @@ -69,7 +68,7 @@ { /** Copyright notice. */ private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2008"; + "Copyright (c) The University of Edinburgh, 2008-2012"; /** * Constructor. 
@@ -263,39 +262,33 @@ "MyResource", evaluationNode); - TableMetaDataImpl table1MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "faculty"); - ColumnMetaData[] allColumnMetaData = new ColumnMetaData[3]; - allColumnMetaData[0] = - getColumnMetaData("fid", 1, TupleTypes._LONG, table1MetaData); - allColumnMetaData[1] = - getColumnMetaData("fname", 2, TupleTypes._STRING, table1MetaData); - allColumnMetaData[2] = - getColumnMetaData("deptid", 3, TupleTypes._INT, table1MetaData); - table1MetaData.setColumns(allColumnMetaData); + String table1Name = "MyResource_faculty"; + Attribute[] allColumnMetadata = new Attribute[3]; + allColumnMetadata[0] = new AttributeImpl("fid", TupleTypes._LONG, table1Name); + allColumnMetadata[1] = new AttributeImpl("fname", TupleTypes._STRING, table1Name); + allColumnMetadata[2] = new AttributeImpl("deptid", TupleTypes._INT, table1Name); + LogicalSchema table1MetaData = + new SimpleLogicalSchema( + table1Name, + allColumnMetadata); TableSchema table1Schema = new SimpleTableSchema( - "catalog", + "faculty", dataNode, table1MetaData); - TableMetaDataImpl table2MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "aircraft"); - allColumnMetaData = new ColumnMetaData[3]; - allColumnMetaData[0] = - getColumnMetaData("aid", 1, TupleTypes._INT, table2MetaData); - allColumnMetaData[1] = - getColumnMetaData("aname", 2, TupleTypes._STRING, table2MetaData); - allColumnMetaData[2] = - getColumnMetaData("cruisingrange", 3, TupleTypes._INT, table2MetaData); - table2MetaData.setColumns(allColumnMetaData); + String table2Name = "MyResource_aircraft"; + allColumnMetadata = new Attribute[3]; + allColumnMetadata[0] = new AttributeImpl("aid", TupleTypes._INT, table2Name); + allColumnMetadata[1] = new AttributeImpl("aname", TupleTypes._STRING, table2Name); + allColumnMetadata[2] = new AttributeImpl("cruisingrange", TupleTypes._INT, table2Name); + LogicalSchema table2MetaData = + new SimpleLogicalSchema( + table2Name, + allColumnMetadata); 
TableSchema table2Schema = new SimpleTableSchema( - "catalog", + "aircraft", dataNode, table2MetaData); @@ -306,22 +299,4 @@ return dataDictionary; } - /** - * Creates column metadata object. - * - * @param name - * @param position - * @param tupleType - * @param tableMetaData - * @return - */ - private ColumnMetaData getColumnMetaData( - String name, int position, int tupleType, TableMetaData tableMetaData) - { - ColumnMetaDataImpl result = new ColumnMetaDataImpl( - name, position, tableMetaData); - result.setTupleType(tupleType); - return result; - } - } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-12 11:57:36
|
Revision: 2143 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2143&view=rev Author: amykrause Date: 2012-11-12 11:57:27 +0000 (Mon, 12 Nov 2012) Log Message: ----------- Fixing tests. Modified Paths: -------------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/JoinOptimiserTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/MetadataWithStatisticsConverter.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestDQPFederation.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestHelper.java Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/JoinOptimiserTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/JoinOptimiserTest.java 2012-11-12 11:36:01 UTC (rev 2142) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/JoinOptimiserTest.java 2012-11-12 11:57:27 UTC (rev 2143) @@ -168,25 +168,4 @@ new FileReader(PATH + "UKIDSS_SDSS_single_feature.txt")); } - public void testJoinWithTwoEqualities() throws Exception - { - fail("Test not implemented!"); - } - - // public void testPhysicalSchema() throws Exception -// { -// TableSchema table = -// mTestHelper.getDataDictionary().getTableSchema("UKIDSS_lasSource"); -// assertTrue(table.getPhysicalSchema() instanceof StatisticsPhysicalSchema); -// StatisticsPhysicalSchema statsSchema = -// (StatisticsPhysicalSchema)table.getPhysicalSchema(); -// CardinalityStatistics stats = statsSchema.getCardinalityStatistics(); -// assertNotNull(stats); -// // would throw a runtime exception if no matching attribute -// AttributeStatistics attrStats = -// stats.getStatistics(new AttributeImpl("ra")); -// assertNotNull(attrStats); -// } - - } Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/MetadataWithStatisticsConverter.java 
=================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/MetadataWithStatisticsConverter.java 2012-11-12 11:36:01 UTC (rev 2142) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/MetadataWithStatisticsConverter.java 2012-11-12 11:57:27 UTC (rev 2143) @@ -1,12 +1,14 @@ package uk.org.ogsadai.lqp.optimiser.join; import java.sql.Types; -import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.log4j.Logger; import org.w3c.dom.Document; @@ -14,13 +16,12 @@ import org.w3c.dom.Node; import org.w3c.dom.NodeList; -import uk.org.ogsadai.activity.astro.VOTableDataTypeConverter; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaData; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; import uk.org.ogsadai.dqp.lqp.AttributeImpl; import uk.org.ogsadai.dqp.lqp.cardinality.AttributeHistogramBin; import uk.org.ogsadai.dqp.lqp.cardinality.AttributeStatistics; @@ -37,7 +38,15 @@ Logger.getLogger(MetadataWithStatisticsConverter.class); private static final Map<String, Integer> DATATYPE_TO_SQLTYPE = new HashMap<String, Integer>(); - + public static final Map<String, String> TAP_TYPE_TO_DSA = + new HashMap<String, String>(); + public static final Set<String> DSA_TYPES = + new HashSet<String>( Arrays.asList( + "float", "int", "string", "dateTime", "boolean", + "bit", "unsignedByte", + "short", 
"long", "char", "unicodeChar", "double", + "floatComplex", "doubleComplex")); + static { DATATYPE_TO_SQLTYPE.put("bit", Types.BIT); @@ -73,6 +82,18 @@ DATATYPE_TO_SQLTYPE.put("blob", Types.BLOB); DATATYPE_TO_SQLTYPE.put("clob", Types.CLOB); DATATYPE_TO_SQLTYPE.put("boolean", Types.BOOLEAN); + + TAP_TYPE_TO_DSA.put("BOOLEAN", "boolean"); + TAP_TYPE_TO_DSA.put("SMALLINT", "short"); + TAP_TYPE_TO_DSA.put("INTEGER", "int"); + TAP_TYPE_TO_DSA.put("BIGINT", "long"); + TAP_TYPE_TO_DSA.put("FLOAT", "float"); + TAP_TYPE_TO_DSA.put("REAL", "double"); + TAP_TYPE_TO_DSA.put("DOUBLE", "double"); + TAP_TYPE_TO_DSA.put("TIMESTAMP", "dateTime"); + TAP_TYPE_TO_DSA.put("CHAR", "char"); + TAP_TYPE_TO_DSA.put("VARCHAR", "string"); + TAP_TYPE_TO_DSA.put("CLOB", "string"); } /** @@ -101,7 +122,8 @@ { Element table = (Element)tables.item(i); String tableName = table.getElementsByTagName("name").item(0).getTextContent(); - TableMetaDataImpl tableMetadata = new TableMetaDataImpl("", "", tableName); + String alias = dataNode.getTableNamePrefix() + "_" + tableName; + List<Attribute> attributes = new LinkedList<Attribute>(); LOG.debug("Table metadata for table " + tableName); @@ -109,13 +131,12 @@ Double numRows = getValueAsDouble(table, "rowCount"); SimpleCardinalityStatistics stats = new SimpleCardinalityStatistics(); - List<ColumnMetaData> columnsMetadata = new ArrayList<ColumnMetaData>(); NodeList columns = table.getElementsByTagNameNS("", "column"); for (int j=0; j<columns.getLength(); j++) { Element column = (Element)columns.item(j); String colName = column.getElementsByTagName("name").item(0).getTextContent(); - ColumnMetaDataImpl columnMetadata = new ColumnMetaDataImpl(colName, j, tableMetadata); + Attribute columnMetadata = new AttributeImpl(colName, alias); NodeList dataTypeNodes = column.getElementsByTagName("dataType"); if (dataTypeNodes.getLength() > 0) { @@ -141,15 +162,9 @@ } continue; } - columnMetadata.setDataType(dsaType); - columnMetadata.setTupleType( + 
columnMetadata.setType( TupleUtilities.mapSQLTypeToODTupleType( dsaType, dataType)); - columnMetadata.setTypeName(dataType); - columnMetadata.setColumnSize(-1); - columnMetadata.setDecimalDigits(-1); - columnMetadata.setNullable(true); - columnMetadata.setFullName(getValue(column, "description")); } AttributeStatistics attrStats = getAttributeStatistics(column, numRows); @@ -159,7 +174,7 @@ new AttributeImpl(colName, null), attrStats); numRows = attrStats.getNumRows(); } - columnsMetadata.add(columnMetadata); + attributes.add(columnMetadata); } SimpleStatisticsPhysicalSchema physSchema = null; @@ -178,13 +193,10 @@ "' - no row count available."); } } - - tableMetadata.setColumns((ColumnMetaData[]) - columnsMetadata.toArray( - new ColumnMetaData[columnsMetadata.size()])); - + LogicalSchema tableMetadata = + new SimpleLogicalSchema(alias, attributes); TableSchema tableSchema = - new SimpleTableSchema(null, dataNode, tableMetadata, physSchema); + new SimpleTableSchema(alias, dataNode, tableMetadata, physSchema); result.add(tableSchema); } return result; @@ -300,17 +312,6 @@ return histogram; } - private static String getValue(Element element, String tag) - { - NodeList list = element.getElementsByTagName(tag); - String result = null; - if (list.getLength() > 0) - { - result = list.item(0).getTextContent(); - } - return result; - } - private static Double getValueAsDouble(Element element, String tag) { NodeList list = element.getElementsByTagName(tag); @@ -349,7 +350,7 @@ if (type.endsWith("TAPType")) { // use the TAP type converter - dataType = VOTableDataTypeConverter.TAP_TYPE_TO_DSA.get(dataType); + dataType = TAP_TYPE_TO_DSA.get(dataType); } else if (type.endsWith("VOTableType")) { @@ -357,7 +358,7 @@ } } - if (VOTableDataTypeConverter.DSA_TYPES.contains(dataType)) + if (DSA_TYPES.contains(dataType)) { return dataType; } Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestDQPFederation.java 
=================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestDQPFederation.java 2012-11-12 11:36:01 UTC (rev 2142) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestDQPFederation.java 2012-11-12 11:57:27 UTC (rev 2143) @@ -18,7 +18,7 @@ import uk.org.ogsadai.dqp.lqp.udf.FunctionRepository; import uk.org.ogsadai.dqp.presentation.common.DQPResourceConfigurationException; import uk.org.ogsadai.resource.dataresource.dqp.DQPFederation; -import uk.org.ogsadai.stats.astro.tools.XML; +import uk.org.ogsadai.util.xml.XML; public class TestDQPFederation implements DQPFederation { Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestHelper.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestHelper.java 2012-11-12 11:36:01 UTC (rev 2142) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/TestHelper.java 2012-11-12 11:57:27 UTC (rev 2143) @@ -128,18 +128,6 @@ resource.getKey(), resource.getKey(), evalNode); addTableSchemas(doc, dataNode); } -// String ukidssFile = "/Users/akrause/astronomy/SDSS/UKIDSS-DR6-tables-stats.xml"; -// String sdssFile = "/Users/akrause/astronomy/SDSS/SDSS-DR8-tables.xml"; -// Document ukidss = XML.fileToDocument(ukidssFile); -// Document sdss = XML.fileToDocument(sdssFile); -// -// DataNode dataNodeUKIDSS = new SimpleDataNode("UKIDSS", "UKIDSS", evalNode); -// addTableSchemas(ukidss, dataNodeUKIDSS); -// DataNode dataNodeSDSS = new SimpleDataNode("SDSS", "SDSS", evalNode); -// addTableSchemas(sdss, dataNodeSDSS); -// DataNode dataNodeUKIDSSLink = -// new SimpleDataNode("UKIDSS", "UKIDSS_Link", evalNode); -// addTableSchemas(ukidss, dataNodeUKIDSSLink); mFunctionRepository = FunctionRepositoryConfig.createFunctionRepository( This was sent by the SourceForge.net collaborative 
development platform, the world's largest Open Source development site. |
From: <amy...@us...> - 2012-11-12 11:36:15
|
Revision: 2142 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2142&view=rev Author: amykrause Date: 2012-11-12 11:36:01 +0000 (Mon, 12 Nov 2012) Log Message: ----------- Updated tests. Modified Paths: -------------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/LQPTestUtils.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/operators/ProjectOperatorTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/ExtendedTableScanQueryTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TestDataDictionary.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/join/JoinOrderingOptimiserTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/expression/arithmetic/visitors/AttrRenameArithmeticExprVisitorTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/lqp/optimiser/join/JoinOptimiserTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/ThetaJoinTest.java Added Paths: ----------- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyAntiJoinTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyJoinTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbyOuterJoinTest.java sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/tuple/join/DerbySemiJoinTest.java Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/LQPTestUtils.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/LQPTestUtils.java 2012-11-12 11:32:50 UTC (rev 2141) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/LQPTestUtils.java 2012-11-12 11:36:01 UTC (rev 2142) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2008-2011. +// Copyright (c) The University of Edinburgh, 2008-2012. 
// // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,8 +19,8 @@ import java.io.File; import java.io.IOException; import java.net.MalformedURLException; -import java.sql.Types; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import org.antlr.runtime.tree.CommonTree; @@ -29,13 +29,13 @@ import uk.org.ogsadai.context.OGSADAIConstants; import uk.org.ogsadai.context.OGSADAIContext; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.CompilerConfiguration; import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.PhysicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimplePhysicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; import uk.org.ogsadai.dqp.lqp.operators.TableScanOperator; @@ -62,7 +62,7 @@ { /** Copyright notice. */ private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2008-2011"; + "Copyright (c) The University of Edinburgh, 2008-2012"; /** Query parser. 
*/ private SQLQueryParser mParser; @@ -169,365 +169,195 @@ SimpleDataDictionary dictionary = new SimpleDataDictionary(); // table 'authors' - TableMetaDataImpl metadata = new TableMetaDataImpl("", "", "authors"); - ColumnMetaDataImpl[] columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("name", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("id", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); - TableSchema schema = new SimpleTableSchema(metadata, "authors", node1); + String source = "authors"; + Attribute[] columns = new Attribute[2]; + columns[0] = new AttributeImpl("name", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("id", TupleTypes._INT, source); + LogicalSchema metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + TableSchema schema = new SimpleTableSchema(source, node1, metadata); dictionary.add(schema); // table 'authors_titles' - metadata = new TableMetaDataImpl("", "", "authors_titles"); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("name", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("author_id", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "authors_titles", node2); + source = "authors_titles"; + columns = new Attribute[2]; + columns[0] = new AttributeImpl("name", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("author_id", TupleTypes._INT, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node1, metadata); 
dictionary.add(schema); - // table employee - metadata = new TableMetaDataImpl("", "", "employee"); - columns = new ColumnMetaDataImpl[10]; - columns[0] = new ColumnMetaDataImpl("fname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("minit", 2, metadata); - columns[1].setDataType(Types.CHAR); - columns[1].setTupleType(TupleTypes._CHAR); - columns[2] = new ColumnMetaDataImpl("lname", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("ssn", 4, metadata); - columns[3].setDataType(Types.VARCHAR); - columns[3].setTupleType(TupleTypes._STRING); - columns[3].setPrimaryKey(true); - columns[4] = new ColumnMetaDataImpl("bdate", 5, metadata); - columns[4].setDataType(Types.DATE); - columns[4].setTupleType(TupleTypes._DATE); - columns[5] = new ColumnMetaDataImpl("address", 6, metadata); - columns[5].setDataType(Types.VARCHAR); - columns[5].setTupleType(TupleTypes._STRING); - columns[6] = new ColumnMetaDataImpl("sex", 7, metadata); - columns[6].setDataType(Types.CHAR); - columns[6].setTupleType(TupleTypes._CHAR); - columns[7] = new ColumnMetaDataImpl("salary", 8, metadata); - columns[7].setDataType(Types.DECIMAL); - columns[7].setTupleType(TupleTypes._BIGDECIMAL); - columns[8] = new ColumnMetaDataImpl("superssn", 9, metadata); - columns[8].setDataType(Types.VARCHAR); - columns[8].setTupleType(TupleTypes._STRING); - columns[8].setPrimaryKey(true); - columns[9] = new ColumnMetaDataImpl("dno", 10, metadata); - columns[9].setDataType(Types.INTEGER); - columns[9].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_employee", node2); + // table 'employee' + source = "employee"; + columns = new Attribute[10]; + columns[0] = new AttributeImpl("fname", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("minit", TupleTypes._CHAR, source); + 
columns[2] = new AttributeImpl("lname", TupleTypes._STRING, source); + columns[3] = new AttributeImpl("ssn", TupleTypes._STRING, source, true); + columns[4] = new AttributeImpl("bdate", TupleTypes._DATE, source); + columns[5] = new AttributeImpl("address", TupleTypes._STRING, source); + columns[6] = new AttributeImpl("sex", TupleTypes._CHAR, source); + columns[7] = new AttributeImpl("salary", TupleTypes._BIGDECIMAL, source); + columns[8] = new AttributeImpl("superssn", TupleTypes._STRING, source, true); + columns[9] = new AttributeImpl("dno", TupleTypes._INT, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node1, metadata); dictionary.add(schema); - // table employee_ - metadata = new TableMetaDataImpl("", "", "employee_r3"); - columns = new ColumnMetaDataImpl[10]; - columns[0] = new ColumnMetaDataImpl("fname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("minit", 2, metadata); - columns[1].setDataType(Types.CHAR); - columns[1].setTupleType(TupleTypes._CHAR); - columns[2] = new ColumnMetaDataImpl("lname", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("ssn", 4, metadata); - columns[3].setDataType(Types.VARCHAR); - columns[3].setTupleType(TupleTypes._STRING); - columns[3].setPrimaryKey(true); - columns[4] = new ColumnMetaDataImpl("bdate", 5, metadata); - columns[4].setDataType(Types.DATE); - columns[4].setTupleType(TupleTypes._DATE); - columns[5] = new ColumnMetaDataImpl("address", 6, metadata); - columns[5].setDataType(Types.VARCHAR); - columns[5].setTupleType(TupleTypes._STRING); - columns[6] = new ColumnMetaDataImpl("sex", 7, metadata); - columns[6].setDataType(Types.CHAR); - columns[6].setTupleType(TupleTypes._CHAR); - columns[7] = new ColumnMetaDataImpl("salary", 8, metadata); - 
columns[7].setDataType(Types.DECIMAL); - columns[7].setTupleType(TupleTypes._BIGDECIMAL); - columns[8] = new ColumnMetaDataImpl("superssn", 9, metadata); - columns[8].setDataType(Types.VARCHAR); - columns[8].setTupleType(TupleTypes._STRING); - columns[8].setPrimaryKey(true); - columns[9] = new ColumnMetaDataImpl("dno", 10, metadata); - columns[9].setDataType(Types.INTEGER); - columns[9].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_employee_r3", node2r3); + // table employee_r3 + source = "employee_r3"; + columns = new Attribute[10]; + columns[0] = new AttributeImpl("fname", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("minit", TupleTypes._CHAR, source); + columns[2] = new AttributeImpl("lname", TupleTypes._STRING, source); + columns[3] = new AttributeImpl("ssn", TupleTypes._STRING, source, true); + columns[4] = new AttributeImpl("bdate", TupleTypes._DATE, source); + columns[5] = new AttributeImpl("address", TupleTypes._STRING, source); + columns[6] = new AttributeImpl("sex", TupleTypes._CHAR, source); + columns[7] = new AttributeImpl("salary", TupleTypes._BIGDECIMAL, source); + columns[8] = new AttributeImpl("superssn", TupleTypes._STRING, source, true); + columns[9] = new AttributeImpl("dno", TupleTypes._INT, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema("l_employee_r3", node1, metadata); dictionary.add(schema); // table department - metadata = new TableMetaDataImpl("", "", "department"); - columns = new ColumnMetaDataImpl[4]; - columns[0] = new ColumnMetaDataImpl("dname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("dnumber", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - columns[2] = new ColumnMetaDataImpl("mgrssn", 3, metadata); - 
columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[2].setPrimaryKey(true); - columns[3] = new ColumnMetaDataImpl("mgrstartdate", 4, metadata); - columns[3].setDataType(Types.DATE); - columns[3].setTupleType(TupleTypes._DATE); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_department", node2); + source = "department"; + columns = new Attribute[4]; + columns[0] = new AttributeImpl("dname", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("dnumber", TupleTypes._INT, source, true); + columns[2] = new AttributeImpl("mgrssn", TupleTypes._STRING, source, true); + columns[3] = new AttributeImpl("mgrstartdate", TupleTypes._DATE, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema("l_department", node2, metadata); dictionary.add(schema); // table dept_locations - metadata = new TableMetaDataImpl("", "", "dept_locations"); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("dnumber", 1, metadata); - columns[0].setDataType(Types.INTEGER); - columns[0].setTupleType(TupleTypes._INT); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("dlocation", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_dept_locations", node2); + source = "dept_locations"; + columns = new Attribute[2]; + columns[0] = new AttributeImpl("dnumber", TupleTypes._INT, source, true); + columns[1] = new AttributeImpl("dlocation", TupleTypes._STRING, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema( "l_dept_locations", node2, metadata); dictionary.add(schema); // table project - metadata = new TableMetaDataImpl("", "", "project"); - columns = new ColumnMetaDataImpl[4]; - columns[0] = new ColumnMetaDataImpl("pname", 1, metadata); - 
columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("pnumber", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - columns[2] = new ColumnMetaDataImpl("plocation", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("dnum", 4, metadata); - columns[3].setDataType(Types.INTEGER); - columns[3].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_project", node2); + source = "project"; + columns = new Attribute[4]; + columns[0] = new AttributeImpl("pname", TupleTypes._STRING, source); + columns[1] = new AttributeImpl("pnumber", TupleTypes._INT, source, true); + columns[2] = new AttributeImpl("plocation", TupleTypes._STRING, source); + columns[3] = new AttributeImpl("dnum", TupleTypes._INT, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema("l_project", node2, metadata); dictionary.add(schema); // table works_on - metadata = new TableMetaDataImpl("", "", "works_on"); - columns = new ColumnMetaDataImpl[3]; - columns[0] = new ColumnMetaDataImpl("essn", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("pno", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - columns[2] = new ColumnMetaDataImpl("hours", 3, metadata); - columns[2].setDataType(Types.DECIMAL); - columns[2].setTupleType(TupleTypes._BIGDECIMAL); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "l_works_on", node2); + source = "works_on"; + columns = new Attribute[3]; + columns[0] = new AttributeImpl("essn", TupleTypes._STRING, 
source, true); + columns[1] = new AttributeImpl("pno", TupleTypes._INT, source, true); + columns[2] = new AttributeImpl("hours", TupleTypes._BIGDECIMAL, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema("l_works_on", node2, metadata); dictionary.add(schema); // table dependent - metadata = new TableMetaDataImpl("", "", "dependent"); - columns = new ColumnMetaDataImpl[5]; - columns[0] = new ColumnMetaDataImpl("essn", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("dependent_name", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("sex", 3, metadata); - columns[2].setDataType(Types.CHAR); - columns[2].setTupleType(TupleTypes._CHAR); - columns[3] = new ColumnMetaDataImpl("bdate", 4, metadata); - columns[3].setDataType(Types.DATE); - columns[3].setTupleType(TupleTypes._DATE); - columns[4] = new ColumnMetaDataImpl("relationship", 5, metadata); - columns[4].setDataType(Types.VARCHAR); - columns[4].setTupleType(TupleTypes._STRING); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "dependent", node2); + source = "dependent"; + columns = new Attribute[5]; + columns[0] = new AttributeImpl("essn", TupleTypes._STRING, source, true); + columns[1] = new AttributeImpl("dependent_name", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("sex", TupleTypes._CHAR, source); + columns[3] = new AttributeImpl("bdate", TupleTypes._DATE, source); + columns[4] = new AttributeImpl("relationship", TupleTypes._STRING, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - // table dependent - metadata = new TableMetaDataImpl("", "", "dependent_r3"); - columns = new 
ColumnMetaDataImpl[5]; - columns[0] = new ColumnMetaDataImpl("essn", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("dependent_name", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("sex", 3, metadata); - columns[2].setDataType(Types.CHAR); - columns[2].setTupleType(TupleTypes._CHAR); - columns[3] = new ColumnMetaDataImpl("bdate", 4, metadata); - columns[3].setDataType(Types.DATE); - columns[3].setTupleType(TupleTypes._DATE); - columns[4] = new ColumnMetaDataImpl("relationship", 5, metadata); - columns[4].setDataType(Types.VARCHAR); - columns[4].setTupleType(TupleTypes._STRING); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "dependent", node2r3); + // table dependent_r3 + source = "dependent_r3"; + columns = new Attribute[5]; + columns[0] = new AttributeImpl("essn", TupleTypes._STRING, source, true); + columns[1] = new AttributeImpl("dependent_name", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("sex", TupleTypes._CHAR, source); + columns[3] = new AttributeImpl("bdate", TupleTypes._DATE, source); + columns[4] = new AttributeImpl("relationship", TupleTypes._STRING, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); // AMY's tables - metadata = new TableMetaDataImpl("", "", "aircraft"); - columns = new ColumnMetaDataImpl[3]; - columns[0] = new ColumnMetaDataImpl("aid", 1, metadata); - columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("aname", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new 
ColumnMetaDataImpl("cruisingrange", 3, metadata); - columns[2].setDataType(Types.DECIMAL); - columns[2].setTupleType(TupleTypes._BIGDECIMAL); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "aircraft", node2); + source = "aircraft"; + columns = new Attribute[3]; + columns[0] = new AttributeImpl("aid", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("aname", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("cruisingrange", TupleTypes._BIGDECIMAL, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "aircraft_r3"); - columns = new ColumnMetaDataImpl[3]; - columns[0] = new ColumnMetaDataImpl("aid", 1, metadata); - columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("aname", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("cruisingrange", 3, metadata); - columns[2].setDataType(Types.DECIMAL); - columns[2].setTupleType(TupleTypes._BIGDECIMAL); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "aircraft_r3", node2); + source = "aircraft_r3"; + columns = new Attribute[3]; + columns[0] = new AttributeImpl("aid", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("aname", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("cruisingrange", TupleTypes._BIGDECIMAL, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "certified"); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("eid", 1, metadata); - 
columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("aid", 2, metadata); - columns[1].setDataType(Types.DECIMAL); - columns[1].setTupleType(TupleTypes._BIGDECIMAL); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "certified", node2); + source = "certified"; + columns = new Attribute[2]; + columns[0] = new AttributeImpl("eid", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("aid", TupleTypes._BIGDECIMAL, source, true); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "certified_r3"); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("eid", 1, metadata); - columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("aid", 2, metadata); - columns[1].setDataType(Types.DECIMAL); - columns[1].setTupleType(TupleTypes._BIGDECIMAL); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "certified_r3", node2r3); + source = "certified_r3"; + columns = new Attribute[2]; + columns[0] = new AttributeImpl("eid", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("aid", TupleTypes._BIGDECIMAL, source, true); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2r3, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "flights"); - columns = new ColumnMetaDataImpl[7]; - columns[0] = new ColumnMetaDataImpl("flno", 1, metadata); - columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - columns[0].setPrimaryKey(true); - 
columns[1] = new ColumnMetaDataImpl("origin", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("destination", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("distance", 4, metadata); - columns[3].setDataType(Types.DECIMAL); - columns[3].setTupleType(TupleTypes._BIGDECIMAL); - columns[4] = new ColumnMetaDataImpl("departs", 5, metadata); - columns[4].setDataType(Types.DATE); - columns[4].setTupleType(TupleTypes._DATE); - columns[5] = new ColumnMetaDataImpl("arrives", 6, metadata); - columns[5].setDataType(Types.DATE); - columns[5].setTupleType(TupleTypes._DATE); - columns[6] = new ColumnMetaDataImpl("price", 7, metadata); - columns[6].setDataType(Types.DECIMAL); - columns[6].setTupleType(TupleTypes._BIGDECIMAL); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "flights", node2); + source = "flights"; + columns = new Attribute[7]; + columns[0] = new AttributeImpl("flno", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("origin", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("destination", TupleTypes._STRING, source); + columns[3] = new AttributeImpl("distance",TupleTypes._BIGDECIMAL, source); + columns[4] = new AttributeImpl("departs", TupleTypes._DATE, source); + columns[5] = new AttributeImpl("arrives", TupleTypes._DATE, source); + columns[6] = new AttributeImpl("price", TupleTypes._BIGDECIMAL, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "employees"); - columns = new ColumnMetaDataImpl[3]; - columns[0] = new ColumnMetaDataImpl("eid", 1, metadata); - columns[0].setDataType(Types.DECIMAL); - columns[0].setTupleType(TupleTypes._BIGDECIMAL); - 
columns[0].setPrimaryKey(true); - columns[1] = new ColumnMetaDataImpl("ename", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("salary", 3, metadata); - columns[2].setDataType(Types.DECIMAL); - columns[2].setTupleType(TupleTypes._BIGDECIMAL); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "employees", node2); + source = "employees"; + columns = new Attribute[3]; + columns[0] = new AttributeImpl("eid", TupleTypes._BIGDECIMAL, source, true); + columns[1] = new AttributeImpl("ename", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("salary", TupleTypes._BIGDECIMAL, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "series"); - columns = new ColumnMetaDataImpl[4]; - columns[0] = new ColumnMetaDataImpl("name", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - - columns[1] = new ColumnMetaDataImpl("authorname", 2, metadata); - columns[1].setDataType(Types.VARCHAR); - columns[1].setTupleType(TupleTypes._STRING); - columns[2] = new ColumnMetaDataImpl("sid", 3, metadata); - columns[2].setDataType(Types.INTEGER); - columns[2].setTupleType(TupleTypes._INT); - columns[2].setPrimaryKey(true); - columns[3] = new ColumnMetaDataImpl("date", 4, metadata); - columns[3].setDataType(Types.VARCHAR); - columns[3].setTupleType(TupleTypes._STRING); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "series", node1); + source = "series"; + columns = new Attribute[4]; + columns[0] = new AttributeImpl("name",TupleTypes._STRING, source); + columns[1] = new AttributeImpl("authorname", TupleTypes._STRING, source); + columns[2] = new AttributeImpl("sid", TupleTypes._INT, source, true); + columns[3] = new AttributeImpl("date", 
TupleTypes._STRING, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node1, metadata); dictionary.add(schema); - metadata = new TableMetaDataImpl("", "", "timestampTable"); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("ts", 1, metadata); - columns[0].setDataType(Types.TIMESTAMP); - columns[0].setTupleType(TupleTypes._TIMESTAMP); - columns[1] = new ColumnMetaDataImpl("id", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); - schema = new SimpleTableSchema(metadata, "timestampTable", node1); + source = "timestampTable"; + columns = new Attribute[2]; + columns[0] = new AttributeImpl("ts", TupleTypes._TIMESTAMP, source); + columns[1] = new AttributeImpl("id", TupleTypes._INT, source); + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node1, metadata); dictionary.add(schema); // Test tables for the ISI case @@ -536,12 +366,15 @@ DataNode nodeISSResource2 = new SimpleDataNode( "http://host1/", "DRER", "DSoS", "DSiS", "TPCHResource2", true); - String[] tableNames = + String[] localNames = new String[]{ "PART", "PARTSUPP", "SUPPLIER", "LINEITEM", "LINEITEM" }; - String[] catalogueNames = - new String[]{ "cat1", "cat2", "cat2", "cat1", "cat2"}; - String[] schemaNames = - new String[]{ "schema1", "schema1", "schema1", "schema1", "schema1" }; + String[] tableNames = + new String[]{ + "TPCHResource1_PART", + "TPCHResource2_PARTSUPP", + "TPCHResource2_SUPPLIER", + "TPCHResource1_LINEITEM", + "TPCHResource2_LINEITEM" }; DataNode[] dataNodes= new DataNode[]{ nodeISSResource1, nodeISSResource2, nodeISSResource2, @@ -561,27 +394,23 @@ for (int i=0; i<tableNames.length; ++i) { - TableMetaDataImpl metaData = new TableMetaDataImpl( - catalogueNames[i], - schemaNames[i], - tableNames[i]); - - columns = new 
ColumnMetaDataImpl[columnNames[i].length]; - + metadata = new SimpleLogicalSchema(source, Arrays.asList(columns)); + schema = new SimpleTableSchema(source, node2, metadata); + + columns = new Attribute[columnNames[i].length]; for (int j=0; j<columnNames[i].length; ++j) { - ColumnMetaDataImpl columnMetaData = new ColumnMetaDataImpl( - columnNames[i][j], 0, metaData); - columnMetaData.setTupleType(columnTypes[i][j]); - columns[j] = columnMetaData; + columns[j] = new AttributeImpl( + columnNames[i][j], columnTypes[i][j], tableNames[i]); } - metaData.setColumns(columns); + LogicalSchema metaData = new SimpleLogicalSchema( + tableNames[i], Arrays.asList(columns)); TableSchema tableSchema = new SimpleTableSchema( - catalogueNames[i], + localNames[i], dataNodes[i], metaData); - dictionary.add(tableSchema); + dictionary.add(tableSchema); } mFunctionRepository = FunctionRepositoryConfig.createFunctionRepository( @@ -595,9 +424,8 @@ DataNode nodeAstro2 = new SimpleDataNode( "http://host1/", "DRER", "DSoS", "DSiS", "CAOM", true); - tableNames = new String[]{ "lasSource", "caom_SIAv1" }; - catalogueNames = new String[]{ "cat1", "cat2", "cat2" }; - schemaNames = new String[]{ "schema1", "schema1", "schema1"}; + localNames = new String[]{ "lasSource", "caom_SIAv1" }; + tableNames = new String[]{ "UKIDSS_lasSource", "CAOM_caom_SIAv1" }; dataNodes = new DataNode[]{ nodeAstro1, nodeAstro2 }; columnNames = new String[][]{ @@ -613,29 +441,22 @@ for (int i=0; i<tableNames.length; ++i) { - TableMetaDataImpl metaData = new TableMetaDataImpl( - catalogueNames[i], - schemaNames[i], - tableNames[i]); - columns = new ColumnMetaDataImpl[columnNames[i].length]; + columns = new Attribute[columnNames[i].length]; for (int j=0; j<columnNames[i].length; ++j) - { - ColumnMetaDataImpl columnMetaData = new ColumnMetaDataImpl( - columnNames[i][j], 0, metaData); - columnMetaData.setTupleType(columnTypes[i][j]); - columns[j] = columnMetaData; - } - metaData.setColumns(columns); - + { + columns[j] = new 
AttributeImpl( + columnNames[i][j], columnTypes[i][j], tableNames[i]); + } + LogicalSchema metaData = new SimpleLogicalSchema( + tableNames[i], Arrays.asList(columns)); + TableSchema tableSchema = new SimpleTableSchema( - catalogueNames[i], - dataNodes[i], - metaData, - physicalSchemas[i]); - - + localNames[i], + dataNodes[i], + metaData, + physicalSchemas[i]); dictionary.add(tableSchema); } @@ -646,9 +467,8 @@ DataNode nodeS2 = new SimpleDataNode( "http://host1/", "DRER", "DSoS", "DSiS", "S2", true); - tableNames = new String[]{ "isiS1", "isiS2", "isiS3"}; - catalogueNames = new String[]{ "cat1", "cat2", "cat2" }; - schemaNames = new String[]{ "schema1", "schema1", "schema1"}; + localNames = new String[]{ "isiS1", "isiS2", "isiS3"}; + tableNames = new String[]{ "S1_isiS1", "S2_isiS2", "S2_isiS3"}; dataNodes = new DataNode[]{ nodeS1, nodeS2, nodeS2 }; columnNames = new String[][]{ { "a",}, @@ -665,29 +485,21 @@ for (int i=0; i<tableNames.length; ++i) { - TableMetaDataImpl metaData = new TableMetaDataImpl( - catalogueNames[i], - schemaNames[i], - tableNames[i]); + columns = new Attribute[columnNames[i].length]; - columns = new ColumnMetaDataImpl[columnNames[i].length]; - for (int j=0; j<columnNames[i].length; ++j) { - ColumnMetaDataImpl columnMetaData = new ColumnMetaDataImpl( - columnNames[i][j], 0, metaData); - columnMetaData.setTupleType(columnTypes[i][j]); - columns[j] = columnMetaData; + columns[j] = new AttributeImpl( + columnNames[i][j], columnTypes[i][j], tableNames[i]); } - metaData.setColumns(columns); + LogicalSchema metaData = new SimpleLogicalSchema( + tableNames[i], Arrays.asList(columns)); TableSchema tableSchema = new SimpleTableSchema( - catalogueNames[i], - dataNodes[i], - metaData, - physicalSchemas[i]); - - + localNames[i], + dataNodes[i], + metaData, + physicalSchemas[i]); dictionary.add(tableSchema); } return dictionary; Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/operators/ProjectOperatorTest.java 
=================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/operators/ProjectOperatorTest.java 2012-11-12 11:32:50 UTC (rev 2141) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/operators/ProjectOperatorTest.java 2012-11-12 11:36:01 UTC (rev 2142) @@ -16,6 +16,7 @@ package uk.org.ogsadai.dqp.lqp.operators; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; @@ -23,15 +24,13 @@ import org.antlr.runtime.tree.CommonTree; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaData; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaData; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.DataDictionary; import uk.org.ogsadai.dqp.common.DataNode; import uk.org.ogsadai.dqp.common.EvaluationNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; import uk.org.ogsadai.dqp.lqp.Attribute; import uk.org.ogsadai.dqp.lqp.AttributeImpl; @@ -333,17 +332,12 @@ projectChild, commonTree, mFunctionRepository); projectParent.update(); - System.out.println("Parent heading: " + projectParent.getHeading()); - System.out.println("Child heading: " + projectChild.getHeading()); - ProjectOperator newProject = projectParent.createMegredWithChild(); newProject.update(); // Rename map should be null assertNull("RenameMap must be null", newProject.getRenameMap()); - System.out.println(projectParent.getHeading()); - System.out.println(newProject.getHeading()); assertEquals(2, newProject.getHeading().getAttributes().size()); assertEquals( "aname", newProject.getHeading().getAttributes().get(0).getName()); @@ -657,39 +651,35 @@ 
"MyResource", evaluationNode); - TableMetaDataImpl table1MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "faculty"); - ColumnMetaData[] allColumnMetaData = new ColumnMetaData[3]; + String table1Name = "MyResource_faculty"; + Attribute[] allColumnMetaData = new Attribute[3]; allColumnMetaData[0] = - getColumnMetaData("fid", 1, TupleTypes._LONG, table1MetaData); + new AttributeImpl("fid", TupleTypes._LONG, table1Name); allColumnMetaData[1] = - getColumnMetaData("fname", 2, TupleTypes._STRING, table1MetaData); + new AttributeImpl("fname", TupleTypes._STRING, table1Name); allColumnMetaData[2] = - getColumnMetaData("deptid", 3, TupleTypes._INT, table1MetaData); - table1MetaData.setColumns(allColumnMetaData); + new AttributeImpl("deptid", TupleTypes._INT, table1Name); + LogicalSchema table1MetaData = + new SimpleLogicalSchema(table1Name, Arrays.asList(allColumnMetaData)); TableSchema table1Schema = new SimpleTableSchema( "catalog", dataNode, table1MetaData); - TableMetaDataImpl table2MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "aircraft"); - allColumnMetaData = new ColumnMetaData[3]; + String table2Name = "MyResource_aircraft"; + allColumnMetaData = new Attribute[3]; allColumnMetaData[0] = - getColumnMetaData("aid", 1, TupleTypes._INT, table2MetaData); + new AttributeImpl("aid", TupleTypes._INT, table2Name); allColumnMetaData[1] = - getColumnMetaData("aname", 2, TupleTypes._STRING, table2MetaData); + new AttributeImpl("aname", TupleTypes._STRING, table2Name); allColumnMetaData[2] = - getColumnMetaData("cruisingrange", 3, TupleTypes._INT, table2MetaData); - table2MetaData.setColumns(allColumnMetaData); + new AttributeImpl("cruisingrange", TupleTypes._INT, table2Name); + LogicalSchema table2MetaData = + new SimpleLogicalSchema(table2Name, Arrays.asList(allColumnMetaData)); TableSchema table2Schema = new SimpleTableSchema( - "catalog", + table2Name, dataNode, table2MetaData); @@ -700,21 +690,4 @@ return dataDictionary; } - /** - * Creates 
column metadata object. - * - * @param name - * @param position - * @param tupleType - * @param tableMetaData - * @return - */ - private ColumnMetaData getColumnMetaData( - String name, int position, int tupleType, TableMetaData tableMetaData) - { - ColumnMetaDataImpl result = new ColumnMetaDataImpl( - name, position, tableMetaData); - result.setTupleType(tupleType); - return result; - } } Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/ExtendedTableScanQueryTest.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/ExtendedTableScanQueryTest.java 2012-11-12 11:32:50 UTC (rev 2141) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/ExtendedTableScanQueryTest.java 2012-11-12 11:36:01 UTC (rev 2142) @@ -1,4 +1,4 @@ -// Copyright (c) The University of Edinburgh, 2011. +// Copyright (c) The University of Edinburgh, 2011-2012. 
// // LICENCE-START // Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,19 +16,18 @@ package uk.org.ogsadai.dqp.lqp.optimiser.implosion; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import junit.framework.TestCase; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaData; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaData; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.DataDictionary; import uk.org.ogsadai.dqp.common.DataNode; import uk.org.ogsadai.dqp.common.EvaluationNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; import uk.org.ogsadai.dqp.lqp.Attribute; import uk.org.ogsadai.dqp.lqp.AttributeImpl; @@ -48,7 +47,7 @@ { /** Copyright statement */ private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2011"; + "Copyright (c) The University of Edinburgh, 2011-2012"; /** Data dictionary. 
*/ private DataDictionary mDataDictionary; @@ -166,39 +165,35 @@ "MyResource", evaluationNode); - TableMetaDataImpl table1MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "faculty"); - ColumnMetaData[] allColumnMetaData = new ColumnMetaData[3]; + String table1 = "MyResource_faculty"; + Attribute[] allColumnMetaData = new Attribute[3]; allColumnMetaData[0] = - getColumnMetaData("fid", 1, TupleTypes._LONG, table1MetaData); + new AttributeImpl("fid", TupleTypes._LONG, table1); allColumnMetaData[1] = - getColumnMetaData("fname", 2, TupleTypes._STRING, table1MetaData); + new AttributeImpl("fname", TupleTypes._STRING, table1); allColumnMetaData[2] = - getColumnMetaData("deptid", 3, TupleTypes._INT, table1MetaData); - table1MetaData.setColumns(allColumnMetaData); + new AttributeImpl("deptid", TupleTypes._INT, table1); + LogicalSchema table1MetaData = + new SimpleLogicalSchema(table1, Arrays.asList(allColumnMetaData)); TableSchema table1Schema = new SimpleTableSchema( - "catalog", - dataNode, - table1MetaData); + "faculty", + dataNode, + table1MetaData); - TableMetaDataImpl table2MetaData = new TableMetaDataImpl( - "catalog", - "schema", - "aircraft"); - allColumnMetaData = new ColumnMetaData[3]; + String table2 = "MyResource_aircraft"; + allColumnMetaData = new Attribute[3]; allColumnMetaData[0] = - getColumnMetaData("aid", 1, TupleTypes._INT, table2MetaData); + new AttributeImpl("aid", TupleTypes._INT, table2); allColumnMetaData[1] = - getColumnMetaData("aname", 2, TupleTypes._STRING, table2MetaData); + new AttributeImpl("aname", TupleTypes._STRING, table2); allColumnMetaData[2] = - getColumnMetaData("cruisingrange", 3, TupleTypes._INT, table2MetaData); - table2MetaData.setColumns(allColumnMetaData); + new AttributeImpl("cruisingrange", TupleTypes._INT, table2); + LogicalSchema table2MetaData = + new SimpleLogicalSchema(table2, Arrays.asList(allColumnMetaData)); TableSchema table2Schema = new SimpleTableSchema( - "catalog", + "aircraft", dataNode, 
table2MetaData); @@ -209,21 +204,4 @@ return dataDictionary; } - /** - * Creates column metadata object. - * - * @param name - * @param position - * @param tupleType - * @param tableMetaData - * @return - */ - private ColumnMetaData getColumnMetaData( - String name, int position, int tupleType, TableMetaData tableMetaData) - { - ColumnMetaDataImpl result = new ColumnMetaDataImpl( - name, position, tableMetaData); - result.setTupleType(tupleType); - return result; - } } Modified: sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TestDataDictionary.java =================================================================== --- sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TestDataDictionary.java 2012-11-12 11:32:50 UTC (rev 2141) +++ sandbox/dqp/server/src/test/unit/java/uk/org/ogsadai/dqp/lqp/optimiser/implosion/TestDataDictionary.java 2012-11-12 11:36:01 UTC (rev 2142) @@ -1,15 +1,16 @@ package uk.org.ogsadai.dqp.lqp.optimiser.implosion; import java.net.MalformedURLException; -import java.sql.Types; +import java.util.Arrays; -import uk.org.ogsadai.converters.databaseschema.ColumnMetaDataImpl; -import uk.org.ogsadai.converters.databaseschema.TableMetaDataImpl; import uk.org.ogsadai.dqp.common.DataNode; +import uk.org.ogsadai.dqp.common.LogicalSchema; import uk.org.ogsadai.dqp.common.PhysicalSchema; import uk.org.ogsadai.dqp.common.TableSchema; import uk.org.ogsadai.dqp.common.simple.SimpleDataDictionary; +import uk.org.ogsadai.dqp.common.simple.SimpleLogicalSchema; import uk.org.ogsadai.dqp.common.simple.SimpleTableSchema; +import uk.org.ogsadai.dqp.lqp.Attribute; import uk.org.ogsadai.dqp.lqp.AttributeImpl; import uk.org.ogsadai.dqp.lqp.cardinality.SimpleStatisticsPhysicalSchema; import uk.org.ogsadai.dqp.lqp.exceptions.TableNotFoundException; @@ -54,394 +55,218 @@ // table 'authors' String tableName = "authors"; - TableMetaDataImpl metadata = new TableMetaDataImpl("", "", tableName); - 
ColumnMetaDataImpl[] columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("name", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("id", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); + Attribute[] columns = new Attribute[2]; + columns[0] = new AttributeImpl("name", TupleTypes._STRING, tableName); + columns[1] = new AttributeImpl("id", TupleTypes._INT, tableName, true); + LogicalSchema metadata = new SimpleLogicalSchema(tableName, Arrays.asList(columns)); PhysicalSchema phys = createPhysicalSchema(tableName, columns); - TableSchema schema = new SimpleTableSchema(metadata, tableName, node1, phys); + TableSchema schema = new SimpleTableSchema(tableName, node1, metadata, phys); mDictionary.add(schema); // table 'authors_titles' tableName = "authors_titles"; - metadata = new TableMetaDataImpl("", "", tableName); - columns = new ColumnMetaDataImpl[2]; - columns[0] = new ColumnMetaDataImpl("name", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("author_id", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - columns[1].setPrimaryKey(true); - metadata.setColumns(columns); + columns = new Attribute[2]; + columns[0] = new AttributeImpl("name", TupleTypes._STRING, tableName); + columns[1] = new AttributeImpl("author_id", TupleTypes._INT, tableName, true); + metadata = new SimpleLogicalSchema(tableName, Arrays.asList(columns)); phys = createPhysicalSchema(tableName, columns); - schema = new SimpleTableSchema(metadata, "authors_titles", node2, phys); + schema = new SimpleTableSchema(tableName, node2, metadata, phys); mDictionary.add(schema); // table employee - metadata = new TableMetaDataImpl("", "", 
"employee"); - columns = new ColumnMetaDataImpl[10]; - columns[0] = new ColumnMetaDataImpl("fname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("minit", 2, metadata); - columns[1].setDataType(Types.CHAR); - columns[1].setTupleType(TupleTypes._CHAR); - columns[2] = new ColumnMetaDataImpl("lname", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("ssn", 4, metadata); - columns[3].setDataType(Types.VARCHAR); - columns[3].setTupleType(TupleTypes._STRING); - columns[3].setPrimaryKey(true); - columns[4] = new ColumnMetaDataImpl("bdate", 5, metadata); - columns[4].setDataType(Types.DATE); - columns[4].setTupleType(TupleTypes._DATE); - columns[5] = new ColumnMetaDataImpl("address", 6, metadata); - columns[5].setDataType(Types.VARCHAR); - columns[5].setTupleType(TupleTypes._STRING); - columns[6] = new ColumnMetaDataImpl("sex", 7, metadata); - columns[6].setDataType(Types.CHAR); - columns[6].setTupleType(TupleTypes._CHAR); - columns[7] = new ColumnMetaDataImpl("salary", 8, metadata); - columns[7].setDataType(Types.DECIMAL); - columns[7].setTupleType(TupleTypes._BIGDECIMAL); - columns[8] = new ColumnMetaDataImpl("superssn", 9, metadata); - columns[8].setDataType(Types.VARCHAR); - columns[8].setTupleType(TupleTypes._STRING); - columns[8].setPrimaryKey(true); - columns[9] = new ColumnMetaDataImpl("dno", 10, metadata); - columns[9].setDataType(Types.INTEGER); - columns[9].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); + tableName = "employee"; + columns = new Attribute[10]; + columns[0] = new AttributeImpl("fname", TupleTypes._STRING, tableName); + columns[1] = new AttributeImpl("minit", TupleTypes._CHAR, tableName); + columns[2] = new AttributeImpl("lname", TupleTypes._STRING, tableName); + columns[3] = new AttributeImpl("ssn", TupleTypes._STRING, tableName, true); + columns[4] = 
new AttributeImpl("bdate", TupleTypes._DATE, tableName); + columns[5] = new AttributeImpl("address", TupleTypes._STRING, tableName); + columns[6] = new AttributeImpl("sex", TupleTypes._CHAR, tableName); + columns[7] = new AttributeImpl("salary", TupleTypes._BIGDECIMAL, tableName); + columns[8] = new AttributeImpl("superssn", TupleTypes._STRING, tableName, true); + columns[9] = new AttributeImpl("dno", TupleTypes._INT, tableName); phys = createPhysicalSchema("l_employee", columns); - schema = new SimpleTableSchema(metadata, "l_employee", node2, phys); + metadata = new SimpleLogicalSchema(tableName, Arrays.asList(columns)); + schema = new SimpleTableSchema("l_employee", node2, metadata, phys); mDictionary.add(schema); // table employee_ - metadata = new TableMetaDataImpl("", "", "employee_r3"); - columns = new ColumnMetaDataImpl[10]; - columns[0] = new ColumnMetaDataImpl("fname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("minit", 2, metadata); - columns[1].setDataType(Types.CHAR); - columns[1].setTupleType(TupleTypes._CHAR); - columns[2] = new ColumnMetaDataImpl("lname", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[3] = new ColumnMetaDataImpl("ssn", 4, metadata); - columns[3].setDataType(Types.VARCHAR); - columns[3].setTupleType(TupleTypes._STRING); - columns[3].setPrimaryKey(true); - columns[4] = new ColumnMetaDataImpl("bdate", 5, metadata); - columns[4].setDataType(Types.DATE); - columns[4].setTupleType(TupleTypes._DATE); - columns[5] = new ColumnMetaDataImpl("address", 6, metadata); - columns[5].setDataType(Types.VARCHAR); - columns[5].setTupleType(TupleTypes._STRING); - columns[6] = new ColumnMetaDataImpl("sex", 7, metadata); - columns[6].setDataType(Types.CHAR); - columns[6].setTupleType(TupleTypes._CHAR); - columns[7] = new ColumnMetaDataImpl("salary", 8, metadata); - 
columns[7].setDataType(Types.DECIMAL); - columns[7].setTupleType(TupleTypes._BIGDECIMAL); - columns[8] = new ColumnMetaDataImpl("superssn", 9, metadata); - columns[8].setDataType(Types.VARCHAR); - columns[8].setTupleType(TupleTypes._STRING); - columns[8].setPrimaryKey(true); - columns[9] = new ColumnMetaDataImpl("dno", 10, metadata); - columns[9].setDataType(Types.INTEGER); - columns[9].setTupleType(TupleTypes._INT); - metadata.setColumns(columns); + tableName = "employee_r3"; + columns = new Attribute[10]; + columns[0] = new AttributeImpl("fname", TupleTypes._STRING, tableName); + columns[1] = new AttributeImpl("minit", TupleTypes._CHAR, tableName); + columns[2] = new AttributeImpl("lname", TupleTypes._STRING, tableName); + columns[3] = new AttributeImpl("ssn", TupleTypes._STRING, tableName, true); + columns[4] = new AttributeImpl("bdate", TupleTypes._DATE, tableName); + columns[5] = new AttributeImpl("address", TupleTypes._STRING, tableName); + columns[6] = new AttributeImpl("sex", TupleTypes._CHAR, tableName); + columns[7] = new AttributeImpl("salary", TupleTypes._BIGDECIMAL, tableName); + columns[8] = new AttributeImpl("superssn", TupleTypes._STRING, tableName, true); + columns[9] = new AttributeImpl("dno", TupleTypes._INT, tableName); + metadata = new SimpleLogicalSchema(tableName, Arrays.asList(columns)); phys = createPhysicalSchema("l_employee_r3", columns); - schema = new SimpleTableSchema(metadata, "l_employee_r3", node2r3, phys); + schema = new SimpleTableSchema("l_employee_r3", node2r3, metadata, phys); mDictionary.add(schema); // table department - metadata = new TableMetaDataImpl("", "", "department"); - columns = new ColumnMetaDataImpl[4]; - columns[0] = new ColumnMetaDataImpl("dname", 1, metadata); - columns[0].setDataType(Types.VARCHAR); - columns[0].setTupleType(TupleTypes._STRING); - columns[1] = new ColumnMetaDataImpl("dnumber", 2, metadata); - columns[1].setDataType(Types.INTEGER); - columns[1].setTupleType(TupleTypes._INT); - 
columns[1].setPrimaryKey(true); - columns[2] = new ColumnMetaDataImpl("mgrssn", 3, metadata); - columns[2].setDataType(Types.VARCHAR); - columns[2].setTupleType(TupleTypes._STRING); - columns[2].setPrimaryKey(true); - columns[3] = new ColumnMetaDataImpl("mgrstartdate", 4, metadata); - columns[3].setDataType(Types.DATE); - columns[3].setTupleType(TupleTypes._DATE); - metadata.setColumns(columns); + tableName = "department"; + columns = new Attribute[4]; + columns[0] = new AttributeImpl("dname", TupleTypes._STRING, tableName); + columns[1] = new AttributeImpl("dnumber", TupleTypes._INT, tableName, true); + columns[2] = new AttributeImpl("mgrssn", TupleTypes._STRING, tableName, true); + columns[3] = new AttributeImpl("mgrstartda... [truncated message content] |
From: <amy...@us...> - 2012-11-12 11:33:02
|
Revision: 2141 http://ogsa-dai.svn.sourceforge.net/ogsa-dai/?rev=2141&view=rev Author: amykrause Date: 2012-11-12 11:32:50 +0000 (Mon, 12 Nov 2012) Log Message: ----------- Removed old code that doesn't compile. Added Paths: ----------- sandbox/dqp/server/README.TXT sandbox/dqp/server/ant.properties sandbox/dqp/server/build.xml Removed Paths: ------------- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/OLD_JoinGroup.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OLD_JoinOrderingOptimiser.java sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/orderedmergejoin/ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/range/ Added: sandbox/dqp/server/README.TXT =================================================================== --- sandbox/dqp/server/README.TXT (rev 0) +++ sandbox/dqp/server/README.TXT 2012-11-12 11:32:50 UTC (rev 2141) @@ -0,0 +1,50 @@ + +SQL grammar +=========== + +OGSA-DAI uses the ANTLR SQL grammar tool. The class is in SVN in + +svn/third-party/dependencies/antlr/antlrworks/1.3/antlrworks-1.3.jar + +Run the grammar tool +-------------------- + +You can run the tool as follows: + +$ java -jar antlrworks-1.3.jar + +OGSA-DAI SQL grammars +--------------------- + +We have two files in src/main/grammar + +-SQL92Query.g: SQL to abstract syntax trees. +-SQL92QueryWalker.g: abstract syntax trees to SQL. + +The tool allows the grammars to be edited, for example SQL statements +to be checked and for code to be generated. + +Code generation +--------------- + +For SQL92Query.g the files created are: + +SQL92QueryParser.java +SQL92QueryLexer.java +SQL92Query.tokens + +For SQL92QueryWalker.g the files created are: + +SQL92QueryWalker.java +SQL92QueryWalker.tokens + +This needs the SQL92Query.tokens file to be available. + +Saving updates in SVN +--------------------- + +The tokens files are stored in the same directory as the grammars +i.e. 
in src/main/grammar + +The Java files are stored in +src/main/java/uk/org/ogsadai/parser/sql92query Added: sandbox/dqp/server/ant.properties =================================================================== --- sandbox/dqp/server/ant.properties (rev 0) +++ sandbox/dqp/server/ant.properties 2012-11-12 11:32:50 UTC (rev 2141) @@ -0,0 +1,62 @@ +# Properties +module.title=OGSA-DAI DQP extensions (server-side) + +# Root build file. +root.build.file=../../../ogsa-dai/trunk/common/build-root.xml + +# Joda-time. +joda-time.lib.dir=${dependencies.dir}/joda-time/joda-time/1.6 +# ANTLRWorks. +antlrworks.lib.dir=${dependencies.dir}/antlr/antlrworks/1.3 +# JAXB-api. +jaxb-api.lib.dir=${dependencies.dir}/javax/xml/bind/jaxb-api/2.1 +# Stax-api +stax-api.lib.dir=${dependencies.dir}/javax/xml/stream/stax-api/1.0-2 +# JUNG +jung-api.lib.dir=${dependencies.dir}/net/sf/jung/jung-api/2.0 +jung-graph-impl.lib.dir=${dependencies.dir}/net/sf/jung/jung-graph-impl/2.0 +# EasyMock +easymock.lib.dir=${dependencies.dir}/easymock/easymock/1.1/ +# JMock +jmock.lib.dir=${dependencies.dir}/org/jmock/jmock/2.5.1/ +# Hamcrest +hamcrest-core.lib.dir=${dependencies.dir}/org/hamcrest/hamcrest-core/1.1 +# JUnit +junit.lib.dir=${dependencies.dir}/junit/junit/3.8.1/ +# Log4J +log4j.lib.dir=${dependencies.dir}/log4j/log4j/1.2.8/ + +# JAXB-api. +jaxb-api.lib.dir=${dependencies.dir}/javax/xml/bind/jaxb-api/2.1 +# JAXB-impl. +jaxb-impl.lib.dir=${dependencies.dir}/com/sun/xml/bind/jaxb-impl/2.1.12 +# JAXB-xcj. +jaxb-xcj.lib.dir=${dependencies.dir}/com/sun/xml/bind/jaxb-xjc/2.1.12 +# Activation. 
+activation.lib.dir=${dependencies.dir}/javax/activation/activation/1.1 +# Collections generic +collections-generic.lib.dir=${dependencies.dir}/net/sourceforge/collections/collections-generic/4.01 +# Hamcrest +hamcrest-core.lib.dir=${dependencies.dir}/org/hamcrest/hamcrest-core/1.1 +hamcrest-library.lib.dir=${dependencies.dir}/org/hamcrest/hamcrest-library/1.1 + +# Commons Logging +commons.logging.lib.dir=${dependencies.dir}/commons-logging/commons-logging/1.0.4/ +# Spring +spring.asm.lib.dir=${dependencies.dir}/org/springframework/spring-asm/3.0.1.RELEASE +spring.beans.lib.dir=${dependencies.dir}/org/springframework/spring-beans/3.0.1.RELEASE +spring.context.lib.dir=${dependencies.dir}/org/springframework/spring-context/3.0.1.RELEASE +spring.core.lib.dir=${dependencies.dir}/org/springframework/spring-core/3.0.1.RELEASE +spring.expression.lib.dir=${dependencies.dir}/org/springframework/spring-expression/3.0.1.RELEASE +# OGSA-DAI +ogsadai.common.lib.dir=../../../ogsa-dai/trunk/core/common/build/lib +ogsadai.clientserver.lib.dir=../../../ogsa-dai/trunk/core/clientserver/build/lib +ogsadai.server.lib.dir=../../../ogsa-dai/trunk/core/server/build/lib +ogsadai.server.xml.dir=../../../ogsa-dai/trunk/core/server/xml +ogsadai.client.lib.dir=../../../ogsa-dai/trunk/core/client/build/lib +ogsadai.basic.client.lib.dir=../../../ogsa-dai/trunk/extensions/basic/client/build/lib +ogsadai.basic.server.lib.dir=../../../ogsa-dai/trunk/extensions/basic/server/build/lib +ogsadai.relational.client.lib.dir=../../../ogsa-dai/trunk/extensions/relational/client/build/lib +ogsadai.relational.server.lib.dir=../../../ogsa-dai/trunk/extensions/relational/server/build/lib +ogsadai.dqp.bindings.lib.dir=../../../ogsa-dai/trunk/extensions/dqp/bindings/build/lib +ogsadai.dqp.client.lib.dir=../../../ogsa-dai/trunk/extensions/dqp/client/build/lib Added: sandbox/dqp/server/build.xml =================================================================== --- sandbox/dqp/server/build.xml (rev 0) +++ 
sandbox/dqp/server/build.xml 2012-11-12 11:32:50 UTC (rev 2141) @@ -0,0 +1,207 @@ +<?xml version="1.0"?> + +<!-- Copyright (c) The University of Edinburgh, 2009-2010. --> +<!-- + LICENCE-START + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + LICENCE-END +--> +<project name="dqp-server" default="info"> + + <description> + Build file for OGSA-DAI DQP extensions (server-side). + + Certain default values are specified in ant.properties. + </description> + + <!-- Module directory. --> + <property name="base.dir" value="${basedir}"/> + <!-- Environment variables --> + <property environment="env"/> + <!-- Dependencies --> + <property name="dependencies.dir" value="${env.OGSA_DAI_DEPENDENCIES}"/> + <!-- Packages --> + <property name="packages.dir" value="${env.OGSA_DAI_PACKAGES}"/> + <!-- Load ant.properties --> + <property file="${base.dir}/ant.properties"/> + <!-- Now import utility targets and properties --> + <import file="${root.build.file}"/> + + <!-- CLASSPATHs --> + + <path id="module.build.classpath"> + <pathelement path="${bin.dir}"/> + <fileset dir="${ogsadai.common.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.clientserver.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.server.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.client.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.basic.client.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset 
dir="${ogsadai.basic.server.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.relational.client.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.relational.server.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.dqp.bindings.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${ogsadai.dqp.client.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${joda-time.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${antlrworks.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${jaxb-api.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${stax-api.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${jung-api.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${jung-graph-impl.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.asm.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.beans.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.context.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.core.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.expression.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${log4j.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + </path> + + <path id="module.run.classpath"> + <path refid="module.build.classpath"/> + </path> + + <path id="module.test.build.classpath"> + <pathelement path="${unit.test.bin.dir}"/> + <path refid="module.build.classpath"/> + <fileset dir="${jmock.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${easymock.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${junit.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${hamcrest-core.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + 
</path> + + <path id="module.test.run.classpath"> + <path refid="module.run.classpath"/> + <path refid="module.test.build.classpath"/> + <pathelement path="${ogsadai.server.xml.dir}"/> + <fileset dir="${jaxb-api.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${jaxb-impl.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${jaxb-xcj.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${activation.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${collections-generic.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${hamcrest-core.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${hamcrest-library.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${commons.logging.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.asm.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.beans.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.context.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.core.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + <fileset dir="${spring.expression.lib.dir}"> + <include name="**/*.jar"/> + </fileset> + </path> + + <!-- + Override root "deploy" target. 
+ --> + <target name="deploy" description="Deploy module"> + <antcall target="ogsadai-root.deploy"/> + <antcall target="ogsadai-root.copyJAR"/> + <antcall target="ogsadai-root.copyJavaDocSource"/> + <antcall target="ogsadai-root.copyServerConfiguration"/> + <antcall target="ogsadai-root.configure"/> + <antcall target="updateSpring"/> + </target> + + <!-- + Update Spring configuration file in server.classes.dir + but only if server.classes.dir is.set + --> + <target name="updateSpring" + description="Update Spring configuration with DQP" + if="server.classes.dir"> + <property name="spring.file" + value="${server.classes.dir}/ogsadai-context.xml/"/> + <!-- Enable DQP functionality. --> + <replace file="${spring.file}" value=""> + <replacetoken><![CDATA[ <!-- DQP-BEANS]]></replacetoken> + </replace> + <replace file="${spring.file}" value=""> + <replacetoken><![CDATA[ DQP-BEANS -->]]></replacetoken> + </replace> + </target> +</project> Deleted: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/OLD_JoinGroup.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/OLD_JoinGroup.java 2012-11-09 16:15:21 UTC (rev 2140) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/OLD_JoinGroup.java 2012-11-12 11:32:50 UTC (rev 2141) @@ -1,295 +0,0 @@ -// Copyright (c) The University of Edinburgh, 2009. -// -// LICENCE-START -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// LICENCE-END - -package uk.org.ogsadai.dqp.lqp; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; - -/** - * A class used to represent a group of relations (with related predicates) that - * can be joined together in any order. - * - * @author The OGSA-DAI Project Team - */ -public class OLD_JoinGroup -{ - /** Copyright statement. */ - private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2009"; - - /** List of applicable predicates. */ - private List<Predicate> mPredicateList = new ArrayList<Predicate>(); - /** List of relations in the join group. */ - private List<Operator> mRelationList = new ArrayList<Operator>(); - /** Operator following the joing group. **/ - private Operator mJoinGroupParent; - /** Original child of the join group parent. **/ - private Operator mJoinGroupParentOriginalChild; - /** ? **/ - protected boolean mHasJoin; - - /** - * Creates a new join group. The first join group operator is used to - * extract information about the join group parent. Joins inside the group - * can be freely reordered, so information about the join group parent is - * needed to reconnect the reordered join tree. - * - * @param rootGroupOperator - * first operator in the join group - */ - public OLD_JoinGroup(Operator rootGroupOperator) - { - mJoinGroupParentOriginalChild = rootGroupOperator; - mJoinGroupParent = rootGroupOperator.getParent(); - } - - /** - * Adds a predicate to the join group. - * - * @param predicate - * predicate object - */ - public void addPredicate(Predicate predicate) - { - mPredicateList.add(predicate); - } - - /** - * Adds a relation to the join group. - * - * @param operator - * operator whose result represents intermediate relation - */ - public void addRelation(Operator operator) - { - mRelationList.add(operator); - } - - /** - * Gets the read-only list of relations in the join group. 
- * - * @return list of relations - */ - public List<Operator> getRelations() - { - return Collections.unmodifiableList(mRelationList); - } - - /** - * Gets the read-only list of predicates for the join group. - * - * @return list of predicates - */ - public List<Predicate> getPredicates() - { - return Collections.unmodifiableList(mPredicateList); - } - - /** - * Reconnects reordered join tree with the original parent of the join - * group. - * - * @param operator - * root of the reordered join tree. - */ - public void reconnectWithParent(Operator operator) - { - mJoinGroupParent.replaceChild(mJoinGroupParentOriginalChild, operator); - } - - /** - * Returns a list of possible inner joins. If a merged heading of any two - * relations from the join group contains all attributes used in the - * predicate then the two relations and a predicate represent a possible - * inner join. - * - * @return a list of possible inner joins - */ - public List<PossibleJoin> getPossibleInnerJoins() - { - // XXX ACH - I don't think this should happen as we called - // getPossibleInnerJoins on the join group. But I think - // there may be a bug in that method because it just checks - // that LHS and RHS together contains all the attributes, it - // does not check the instances where one of them alone does. - - List<PossibleJoin> possibleInnerJoins = new ArrayList<PossibleJoin>(); - - for (Predicate p : mPredicateList) - { - Set<Attribute> usedAttrs = p.getAttributes(); - for (int i = 0; i < mRelationList.size() - 1; i++) - { - Operator firstOp = mRelationList.get(i); - for (int j = i + 1; j < mRelationList.size(); j++) - { - Operator secondOp = mRelationList.get(j); - if (firstOp.getHeading().createMerged( - secondOp.getHeading()) - .containsAllUnambiguous(usedAttrs)) - { - // XXX ACH - I think this may well be a bug. - // we only want the cases where firstOp and secondOp - // HAVE TO be combined to contain all the attributes. 
- // If one of them happens to contain them all then - // this would be a product. No? - - possibleInnerJoins.add(new PossibleJoin(firstOp, - secondOp, p)); - } - } - } - } - return possibleInnerJoins; - } - - /** - * Get all possible joins. Cross joins have the predicate set to - * <code>null</code>. - * - * @return a list of all possible joins - */ - public List<PossibleJoin> getPossibleJoins() - { - List<PossibleJoin> possibleJoins = getPossibleInnerJoins(); - - for (int i = 0; i < mRelationList.size() - 1; i++) - { - Operator leftOp = mRelationList.get(i); - for (int j = i + 1; j < mRelationList.size(); j++) - { - Operator rightOp = mRelationList.get(j); - possibleJoins.add(new PossibleJoin(leftOp, rightOp, null)); - } - } - return possibleJoins; - } - - /** - * Removes a possible join (relations and a predicate) from the join list. - * - * @param join - * possible join to be removed - */ - public void removePossibleJoin(PossibleJoin join) - { - mPredicateList.remove(join.getPredicate()); - mRelationList.remove(join.getFirstRelation()); - mRelationList.remove(join.getSecondRelation()); - } - - /** - * {@inheritDoc} - */ - public String toString() - { - StringBuilder sb = new StringBuilder(); - sb.append("JoinGroup(\n Predicates: "); - for (Predicate p : mPredicateList) - sb.append("\t").append(p).append('\n'); - sb.append("\n Relations: "); - for (Operator o : mRelationList) - sb.append("\t").append(o.getHeading().getAttributes()).append('\n'); - return sb.toString(); - } - - /** - * A possible join POJO. - * - * @author The OGSA-DAI Project Team - */ - public class OLD_PossibleJoin - { - /** Copyright statement */ - private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2009"; - - /** First of the joined relations. */ - private Operator mFirstOperand; - /** Second of the joined relations. */ - private Operator mSecondOperand; - /** Join predicate. */ - private Predicate mPredicate; - - /** - * Constructor. 
- * - * @param firstRelation - * first joined relation - * @param secondRelation - * second joined relation - * @param predicate - * join predicate - */ - public PossibleJoin(Operator firstRelation, Operator secondRelation, - Predicate predicate) - { - mFirstOperand = firstRelation; - mSecondOperand = secondRelation; - mPredicate = predicate; - } - - /** - * Gets first join relation. - * - * @return operator representing intermediate relation - */ - public Operator getFirstRelation() - { - return mFirstOperand; - } - - /** - * Gets second join relation. - * - * @return operator representing intermediate relation - */ - public Operator getSecondRelation() - { - return mSecondOperand; - } - - /** - * Gets join predicate. - * - * @return join predicate - */ - public Predicate getPredicate() - { - return mPredicate; - } - - /** - * Checks if possible join is a cross join (product). - * - * @return <code>true</code> if there is no predicate associated with - * this join - */ - public boolean isProduct() - { - return mPredicate == null; - } - - public String toString() - { - return "PossibleJoin( predicate = " + mPredicate +")"; - } - } - -} Deleted: sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OLD_JoinOrderingOptimiser.java =================================================================== --- sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OLD_JoinOrderingOptimiser.java 2012-11-09 16:15:21 UTC (rev 2140) +++ sandbox/dqp/server/src/main/java/uk/org/ogsadai/dqp/lqp/optimiser/join/OLD_JoinOrderingOptimiser.java 2012-11-12 11:32:50 UTC (rev 2141) @@ -1,388 +0,0 @@ -// Copyright (c) The University of Edinburgh, 2008. -// -// LICENCE-START -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// LICENCE-END - -package uk.org.ogsadai.dqp.lqp.optimiser.join; - -import java.util.Collections; -import java.util.List; -import java.util.Set; - -import uk.org.ogsadai.dqp.common.CompilerConfiguration; -import uk.org.ogsadai.dqp.common.DataNode; -import uk.org.ogsadai.dqp.common.EvaluationNode; -import uk.org.ogsadai.dqp.common.RequestDetails; -import uk.org.ogsadai.dqp.lqp.Annotation; -import uk.org.ogsadai.dqp.lqp.Attribute; -import uk.org.ogsadai.dqp.lqp.JoinGroup; -import uk.org.ogsadai.dqp.lqp.JoinGroup.PossibleJoin; -import uk.org.ogsadai.dqp.lqp.JoinGroupExtractor; -import uk.org.ogsadai.dqp.lqp.Operator; -import uk.org.ogsadai.dqp.lqp.Predicate; -import uk.org.ogsadai.dqp.lqp.ScanOperator; -import uk.org.ogsadai.dqp.lqp.cardinality.CardinalityEstimator; -import uk.org.ogsadai.dqp.lqp.exceptions.LQPException; -import uk.org.ogsadai.dqp.lqp.operators.InnerThetaJoinOperator; -import uk.org.ogsadai.dqp.lqp.operators.ProductOperator; -import uk.org.ogsadai.dqp.lqp.operators.SelectOperator; -import uk.org.ogsadai.dqp.lqp.optimiser.Optimiser; -import uk.org.ogsadai.dqp.lqp.optimiser.partitioner.PartitioningOptimiser; -import uk.org.ogsadai.resource.dataresource.dqp.RequestDQPFederation; - -/** - * Join ordering optimiser. - * <p> - * Chooses the order of the joins in the query plan. A greedy algorithm is used - * to select join order. The output execution chain starts with inner theta - * joins, then contains projects and finally any remaining select predicates. 
- * The output join order will be a bushy tree (rather then left-deep or - * right-deep). - * <p> - * The cost of each possible join is determined the following information: - * <ol> - * <li>Are the two sides of the join on the same data node (i.e. same database). - * True is higher ranked.</li> - * <li>Are the two sides of the join on the evaluation node (i.e. exposed by the - * same OGSA-DAI server. True is higher ranked.</li> - * <li>The join selectivity, which is defined as (join cardinality)/((left child - * cardinality)*(right child cardinality)). Lower values are higher ranked. - * </ol> - * Inner theta joins are ranked according to the above criteria in the order - * specified. The data nodes and evaluation nodes are part of the criteria to - * support later implosion where the whole join can be sent to the data source. - * Even when this cannot be done, prioritising the join of to branches on the - * same evaluation may reduce the amount of data transfer to be performed. - * <p> - * Cross joins (products) can be performed in any order and this optimiser - * makes not effort to optimise that ordering. - * <p> - * There are a few limitation of this algorithm and implementation: - * <ul> - * <li> - * The greedy algorithm is not optimal. But according to Database System - * Implementation by Garcia-Molina, Ullman and Widom it is a commonly used - * heuristic (section 7.6.6, page 403) - * </li> - * <li> - * Although we prioritise the possibility of implosion by preferring the - * case where both sides of the join are on the same data node we do not - * check to confirm the join (and the child query plan) can actually be - * imploded. - * </li> - * <li> - * Highly ranking joins where both sides are on the same evaluation node may - * not be the best approach. This is left over from an initial attempt at - * prioritising same data node joins that mistakenly used the evaluation node - * rather than the data node (see ticket #327). 
- * </li> - * <li> - * No optimisation is carried out for the ordering of products. - * </li> - * <li> - * The ranking assumes that imploding the join is always best. In some - * cases this may lead to more data being transferred than if the join - * was executed by OGSA-DAI. - * </li> - * <li> - * The ranking and the greedy algorithm are both coded in this class. A - * better design would be to decouple these and have the ranking objects - * passed to this optimiser. This class would therefore implement the - * just greedy algorithm using an abstract ranking object. - * </li> - * </ul> - * - * @author The OGSA-DAI Project Team. - */ -public class OLD_JoinOrderingOptimiser implements Optimiser -{ - /** Copyright notice. */ - private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2008"; - - private CardinalityEstimator mCardinalityEstimator; - - public void setCardinalityEstimator( - CardinalityEstimator cardinalityEstimator) - { - mCardinalityEstimator = cardinalityEstimator; - } - - private void processJoinGroup( - JoinGroup joinGroup, - PartitioningOptimiser partitioner) - throws LQPException - { - Operator currentRoot = null; - while (joinGroup.getPossibleInnerJoins().size() > 0) - { - Operator bestJoin = null; - PossibleJoin bestPossibleJoin = null; - JoinRank bestRank = new JoinRank(0, false, false, false); - - for (PossibleJoin pj : joinGroup.getPossibleInnerJoins()) - { - Operator joinOperator; - - joinOperator = new InnerThetaJoinOperator(pj.getPredicate()); - - Operator firstOperand = pj.getFirstRelation(); - Operator secondOperand = pj.getSecondRelation(); - - joinOperator.setChild(1, firstOperand); - joinOperator.setChild(0, secondOperand); - joinOperator.update(); - - // estimate selectivity - joinOperator.accept(mCardinalityEstimator); - double selectivity = - Annotation.getCardinalityAnnotation(joinOperator) / - Annotation.getCardinalityAnnotation(joinOperator.getChild(0)) * - 
Annotation.getCardinalityAnnotation(joinOperator.getChild(1)); - - partitioner.annotateWithEvalNodes(joinOperator); - - boolean tableScanJoin = firstOperand instanceof ScanOperator - && secondOperand instanceof ScanOperator; - - DataNode firstOpDataNode = - Annotation.getDataNodeAnnotation(firstOperand); - DataNode secondOpDataNode = - Annotation.getDataNodeAnnotation(secondOperand); - EvaluationNode firstOpEvaluationNode = - Annotation.getEvaluationNodeAnnotation(firstOperand); - EvaluationNode secondOpEvaluationNode = - Annotation.getEvaluationNodeAnnotation(secondOperand); - - boolean sameDataNode = - firstOpDataNode != null && - secondOpDataNode != null && - firstOpDataNode.equals(secondOpDataNode); - - boolean sameEvaluationNode = - firstOpEvaluationNode != null && - secondOpEvaluationNode != null && - firstOpEvaluationNode.equals(secondOpEvaluationNode); - - - JoinRank currentRank = new JoinRank( - selectivity, - sameDataNode, - sameEvaluationNode, - tableScanJoin); - joinOperator.disconnect(); - - if (currentRank.higherThan(bestRank) - || bestPossibleJoin == null) - { - bestRank = currentRank; - bestJoin = joinOperator; - bestPossibleJoin = pj; - } - } - - Operator firstOp = bestPossibleJoin.getFirstRelation(); - Operator secondOp = bestPossibleJoin.getSecondRelation(); - - Predicate p = ((InnerThetaJoinOperator) bestJoin) - .getPredicate(); - Set<Attribute> attrList = p.getAttributes(); - - boolean selFirst = firstOp.getHeading() - .containsAllUnambiguous(attrList); - boolean selSecond = secondOp.getHeading() - .containsAllUnambiguous(attrList); - - if(selFirst || selSecond) - { - // XXX ACH - I don't think this should happen as we called - // getPossibleInnerJoins on the join group. But I think - // there may be a bug in that method because it just checks - // that LHS and RHS together contains all the attributes, it - // does not check the instances where one of them alone does. 
- - // this join is really a product - Operator sel = new SelectOperator(p.getClone()); - Operator prd = new ProductOperator(); - - prd.setChild(0, sel); - if(selFirst) - { - sel.setChild(0, firstOp); - prd.setChild(1, secondOp); - } - else - { - sel.setChild(0, secondOp); - prd.setChild(1, firstOp); - } - prd.update(); - joinGroup.addRelation(prd); - joinGroup.removePossibleJoin(bestPossibleJoin); - - currentRoot = prd; - } - else - { - // this is a proper join - bestJoin.setChild(0, firstOp); - bestJoin.setChild(1, secondOp); - bestJoin.update(); - - joinGroup.addRelation(bestJoin); - joinGroup.removePossibleJoin(bestPossibleJoin); - - currentRoot = bestJoin; - } - } - - // Cross joins may still be there - while (joinGroup.getPossibleJoins().size() > 0) - { - PossibleJoin possibleJoin = joinGroup.getPossibleJoins().get(0); - ProductOperator productOp = new ProductOperator(); - - productOp.setChild(1, possibleJoin.getFirstRelation()); - productOp.setChild(0, possibleJoin.getSecondRelation()); - productOp.update(); - - joinGroup.addRelation(productOp); - joinGroup.removePossibleJoin(possibleJoin); - - currentRoot = productOp; - } - - // It may happen that we will have some predicates left - for (Predicate p : joinGroup.getPredicates()) - { - SelectOperator selOp = new SelectOperator(currentRoot, p); - currentRoot = selOp; - } - joinGroup.reconnectWithParent(currentRoot); - } - - - /** - * {@inheritDoc} - */ - public Operator optimise( - Operator lqpRoot, - RequestDQPFederation requestFederation, - CompilerConfiguration compilerConfiguration, - RequestDetails requestDetails) throws LQPException - { - mCardinalityEstimator.setDataDictionary( - requestFederation.getDataDictionary()); - - List<JoinGroup> joinGroups = JoinGroupExtractor.getJoinGroups(lqpRoot); - // We want to process join groups starting from the "leaf groups" - Collections.reverse(joinGroups); - - PartitioningOptimiser partitioner = new PartitioningOptimiser(); - - for (JoinGroup joinGroup : 
joinGroups) - { - System.out.println("Join group: " + joinGroup); - processJoinGroup(joinGroup, partitioner); - } - - return lqpRoot; - } - - /** - * Private class for join ranks. - * - * @author The OGSA-DAI Project Team. - */ - private class JoinRank - { - /** Copyright notice. */ - private static final String COPYRIGHT_NOTICE = - "Copyright (c) The University of Edinburgh, 2008"; - - /** Selectivity. **/ - private double mSelectivity; - /** Indicates if the two operands are on the same data node. */ - private boolean mSameDataNode; - /** Indicates if the two operands are on the same evaluation node. */ - private boolean mSameEvaluationNode; - /** Indicates it a join involves two table scans. */ - private boolean mTableScanJoin; - - /** - * Constructor. - * - * @param selectivity - * join selectivity factor - * @param sameDataNode - * signifies if the two operands are on the same data node - * @param sameEvaluationNode - * signifies if two two operands are on the same evaluation node - * @param tableScanJoin - * signifies if both join operands are table scans - */ - public JoinRank( - double selectivity, - boolean sameDataNode, - boolean sameEvaluationNode, - boolean tableScanJoin) - { - mSelectivity = selectivity; - mSameDataNode = sameDataNode; - mSameEvaluationNode = sameEvaluationNode; - mTableScanJoin = tableScanJoin; - } - - /** - * Compares two ranks. - * - * @param rank - * rank to compare - * @return <code>true</code> is rank is higher than rank passed as - * argument. - */ - public boolean higherThan(JoinRank rank) - { - if (mSameDataNode && !rank.mSameDataNode) - { - return true; - } - else if (mSameEvaluationNode && !rank.mSameEvaluationNode) - { - return true; - } - else - { - if (mTableScanJoin && !rank.mTableScanJoin) - { - return true; - } - else - { - return (mSelectivity > rank.mSelectivity) ? 
true : false; - } - } - } - - /** - * {@inheritDoc} - */ - public String toString() - { - return "S: " + mSelectivity + "\tSameDataNode: " + mSameDataNode - + "\tSameEvaluationNode " + mSameEvaluationNode + "\tTSJ: " + - mTableScanJoin; - } - } -} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |