cayenne-commits mailing list archives

From aadamc...@apache.org
Subject [10/12] cayenne git commit: CAY-2026 Java 7
Date Sat, 12 Sep 2015 10:41:13 GMT
http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomain.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomain.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomain.java
index e8ba6ff..718b55a 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomain.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomain.java
@@ -62,753 +62,753 @@ import org.apache.cayenne.util.ToStringBuilder;
  */
 public class DataDomain implements QueryEngine, DataChannel {
 
-    public static final String SHARED_CACHE_ENABLED_PROPERTY = "cayenne.DataDomain.sharedCache";
-    public static final boolean SHARED_CACHE_ENABLED_DEFAULT = true;
-
-    public static final String VALIDATING_OBJECTS_ON_COMMIT_PROPERTY = "cayenne.DataDomain.validatingObjectsOnCommit";
-    public static final boolean VALIDATING_OBJECTS_ON_COMMIT_DEFAULT = true;
-
-    /**
-     * @deprecated since 4.0 See {@link Constants#SERVER_EXTERNAL_TX_PROPERTY}.
-     */
-    @Deprecated
-    public static final String USING_EXTERNAL_TRANSACTIONS_PROPERTY = "cayenne.DataDomain.usingExternalTransactions";
-
-    /**
-     * @deprecated since 4.0 See {@link Constants#SERVER_EXTERNAL_TX_PROPERTY}.
-     */
-    @Deprecated
-    public static final boolean USING_EXTERNAL_TRANSACTIONS_DEFAULT = false;
-
-    /**
-     * @since 3.1
-     */
-    @Inject
-    protected JdbcEventLogger jdbcEventLogger;
-
-    /**
-     * @since 4.0
-     */
-    @Inject
-    protected TransactionManager transactionManager;
-
-    /**
-     * @since 3.1
-     */
-    protected int maxIdQualifierSize;
-
-    /**
-     * @since 3.1
-     */
-    protected List<DataChannelFilter> filters;
-
-    protected Map<String, DataNode> nodes;
-    protected Map<String, DataNode> nodesByDataMapName;
-    protected DataNode defaultNode;
-    protected Map<String, String> properties;
-
-    protected EntityResolver entityResolver;
-    protected DataRowStore sharedSnapshotCache;
-    protected String name;
-    protected QueryCache queryCache;
-
-    // these are initialized from properties...
-    protected boolean sharedCacheEnabled;
-    protected boolean validatingObjectsOnCommit;
-
-    /**
-     * @since 1.2
-     */
-    protected EventManager eventManager;
-
-    /**
-     * @since 1.2
-     */
-    protected EntitySorter entitySorter;
-
-    protected boolean stopped;
-
-    /**
-     * Creates a DataDomain and assigns it a name.
-     */
-    public DataDomain(String name) {
-        init(name);
-        resetProperties();
-    }
-
-    /**
-     * Creates new DataDomain.
-     * 
-     * @param name
-     *            DataDomain name. Domain can be located using its name in the
-     *            Configuration object.
-     * @param properties
-     *            A Map containing domain configuration properties.
-     * @deprecated since 4.0 unused
-     */
-    @Deprecated
-    public DataDomain(String name, Map properties) {
-        init(name);
-        initWithProperties(properties);
-    }
-
-    private void init(String name) {
-
-        this.filters = new CopyOnWriteArrayList<DataChannelFilter>();
-        this.nodesByDataMapName = new ConcurrentHashMap<String, DataNode>();
-        this.nodes = new ConcurrentHashMap<String, DataNode>();
-
-        // properties are read-only, so no need for concurrent map, or any
-        // specific map
-        // for that matter
-        this.properties = Collections.EMPTY_MAP;
-
-        setName(name);
-    }
-
-    /**
-     * Checks that Domain is not stopped. Throws DomainStoppedException
-     * otherwise.
-     * 
-     * @since 3.0
-     */
-    protected void checkStopped() throws DomainStoppedException {
-        if (stopped) {
-            throw new DomainStoppedException("Domain " + name
-                    + " was shutdown and can no longer be used to access the database");
-        }
-    }
-
-    /**
-     * @since 3.1
-     */
-    public EntitySorter getEntitySorter() {
-        return entitySorter;
-    }
-
-    /**
-     * @since 3.1
-     */
-    public void setEntitySorter(EntitySorter entitySorter) {
-        this.entitySorter = entitySorter;
-    }
-
-    /**
-     * @since 1.1
-     */
-    protected void resetProperties() {
-        properties = Collections.EMPTY_MAP;
-
-        sharedCacheEnabled = SHARED_CACHE_ENABLED_DEFAULT;
-        validatingObjectsOnCommit = VALIDATING_OBJECTS_ON_COMMIT_DEFAULT;
-    }
-
-    /**
-     * Reinitializes domain state with a new set of properties.
-     * 
-     * @since 1.1
-     * @deprecated since 4.0 properties are processed by the DI provider.
-     */
-    @Deprecated
-    public void initWithProperties(Map<String, String> properties) {
-
-        // clone properties to ensure that it is read-only internally
-        properties = properties != null ? new HashMap<String, String>(properties) : Collections.EMPTY_MAP;
-
-        String sharedCacheEnabled = properties.get(SHARED_CACHE_ENABLED_PROPERTY);
-        String validatingObjectsOnCommit = properties.get(VALIDATING_OBJECTS_ON_COMMIT_PROPERTY);
-
-        // init ivars from properties
-        this.sharedCacheEnabled = (sharedCacheEnabled != null) ? "true".equalsIgnoreCase(sharedCacheEnabled)
-                : SHARED_CACHE_ENABLED_DEFAULT;
-        this.validatingObjectsOnCommit = (validatingObjectsOnCommit != null) ? "true"
-                .equalsIgnoreCase(validatingObjectsOnCommit) : VALIDATING_OBJECTS_ON_COMMIT_DEFAULT;
-
-        this.properties = properties;
-    }
-
-    /**
-     * Returns EventManager used by this DataDomain.
-     * 
-     * @since 1.2
-     */
-    public EventManager getEventManager() {
-        return eventManager;
-    }
-
-    /**
-     * Sets EventManager used by this DataDomain.
-     * 
-     * @since 1.2
-     */
-    public void setEventManager(EventManager eventManager) {
-        this.eventManager = eventManager;
-
-        if (sharedSnapshotCache != null) {
-            sharedSnapshotCache.setEventManager(eventManager);
-        }
-    }
-
-    /**
-     * Returns "name" property value.
-     */
-    public String getName() {
-        return name;
-    }
-
-    /**
-     * Sets "name" property to a new value.
-     */
-    public synchronized void setName(String name) {
-        this.name = name;
-        if (sharedSnapshotCache != null) {
-            this.sharedSnapshotCache.setName(name);
-        }
-    }
-
-    /**
-     * Returns <code>true</code> if DataContexts produced by this DataDomain are
-     * using shared DataRowStore. Returns <code>false</code> if each DataContext
-     * would work with its own DataRowStore. Note that this setting can be
-     * overwritten per DataContext.
-     */
-    public boolean isSharedCacheEnabled() {
-        return sharedCacheEnabled;
-    }
-
-    public void setSharedCacheEnabled(boolean sharedCacheEnabled) {
-        this.sharedCacheEnabled = sharedCacheEnabled;
-    }
-
-    /**
-     * Returns whether child DataContexts' default behavior is to perform object
-     * validation before commit is executed.
-     * 
-     * @since 1.1
-     */
-    public boolean isValidatingObjectsOnCommit() {
-        return validatingObjectsOnCommit;
-    }
-
-    /**
-     * Sets the property defining whether child DataContexts should perform
-     * object validation before commit is executed.
-     * 
-     * @since 1.1
-     */
-    public void setValidatingObjectsOnCommit(boolean flag) {
-        this.validatingObjectsOnCommit = flag;
-    }
-
-    /**
-     * @since 1.1
-     * @return a Map of properties for this DataDomain.
-     */
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-
-    /**
-     * Returns snapshots cache for this DataDomain, lazily initializing it on
-     * the first call if 'sharedCacheEnabled' flag is true.
-     */
-    public DataRowStore getSharedSnapshotCache() {
-        if (sharedSnapshotCache == null && sharedCacheEnabled) {
-            this.sharedSnapshotCache = nonNullSharedSnapshotCache();
-        }
-
-        return sharedSnapshotCache;
-    }
-
-    /**
-     * Returns a guaranteed non-null shared snapshot cache regardless of the
-     * 'sharedCacheEnabled' flag setting.
-     */
-    synchronized DataRowStore nonNullSharedSnapshotCache() {
-        if (sharedSnapshotCache == null) {
-            this.sharedSnapshotCache = new DataRowStore(name, properties, eventManager);
-        }
-
-        return sharedSnapshotCache;
-    }
-
-    /**
-     * Shuts down the previous cache instance, sets cache to the new
-     * DataRowStore instance and updates two properties of the new DataRowStore:
-     * name and eventManager.
-     */
-    public synchronized void setSharedSnapshotCache(DataRowStore snapshotCache) {
-        if (this.sharedSnapshotCache != snapshotCache) {
-            if (this.sharedSnapshotCache != null) {
-                this.sharedSnapshotCache.shutdown();
-            }
-            this.sharedSnapshotCache = snapshotCache;
-
-            if (snapshotCache != null) {
-                snapshotCache.setEventManager(getEventManager());
-                snapshotCache.setName(getName());
-            }
-        }
-    }
-
-    public void addDataMap(DataMap dataMap) {
-        getEntityResolver().addDataMap(dataMap);
-        refreshEntitySorter();
-    }
-
-    /**
-     * @since 3.1
-     */
-    public DataMap getDataMap(String mapName) {
-        return getEntityResolver().getDataMap(mapName);
-    }
-
-    /**
-     * Removes named DataMap from this DataDomain and any underlying DataNodes
-     * that include it.
-     * 
-     * @since 3.1
-     */
-    public void removeDataMap(String mapName) {
-        DataMap map = getDataMap(mapName);
-        if (map == null) {
-            return;
-        }
-
-        // remove from data nodes
-        for (DataNode node : nodes.values()) {
-            node.removeDataMap(mapName);
-        }
-
-        nodesByDataMapName.remove(mapName);
-
-        // remove from EntityResolver
-        getEntityResolver().removeDataMap(map);
-
-        refreshEntitySorter();
-    }
-
-    /**
-     * Removes a DataNode from DataDomain. Any maps previously associated with
-     * this node within the domain will still be kept around, however they won't be
-     * mapped to any node.
-     */
-    public void removeDataNode(String nodeName) {
-        DataNode removed = nodes.remove(nodeName);
-        if (removed != null) {
-
-            removed.setEntityResolver(null);
-
-            Iterator<DataNode> it = nodesByDataMapName.values().iterator();
-            while (it.hasNext()) {
-                if (it.next() == removed) {
-                    it.remove();
-                }
-            }
-        }
-    }
-
-    /**
-     * Returns a collection of registered DataMaps.
-     */
-    public Collection<DataMap> getDataMaps() {
-        return getEntityResolver().getDataMaps();
-    }
-
-    /**
-     * Returns an unmodifiable collection of DataNodes associated with this
-     * domain.
-     */
-    public Collection<DataNode> getDataNodes() {
-        return Collections.unmodifiableCollection(nodes.values());
-    }
-
-    /**
-     * Adds new DataNode.
-     */
-    public void addNode(DataNode node) {
-
-        // add node to name->node map
-        nodes.put(node.getName(), node);
-        node.setEntityResolver(getEntityResolver());
-
-        // add node to "ent name->node" map
-        for (DataMap map : node.getDataMaps()) {
-            addDataMap(map);
-            nodesByDataMapName.put(map.getName(), node);
-        }
-    }
-
-    /**
-     * Returns registered DataNode whose name matches <code>name</code>
-     * parameter.
-     * 
-     * @since 3.1
-     */
-    public DataNode getDataNode(String nodeName) {
-        return nodes.get(nodeName);
-    }
-
-    /**
-     * Returns a DataNode that should handle queries for all entities in a
-     * DataMap.
-     * 
-     * @since 1.1
-     */
-    public DataNode lookupDataNode(DataMap map) {
-
-        DataNode node = nodesByDataMapName.get(map.getName());
-        if (node == null) {
-
-            // see if one of the node states has changed, and the map is now
-            // linked...
-            for (DataNode n : getDataNodes()) {
-                for (DataMap m : n.getDataMaps()) {
-                    if (m == map) {
-                        nodesByDataMapName.put(map.getName(), n);
-                        node = n;
-                        break;
-                    }
-                }
-
-                if (node != null) {
-                    break;
-                }
-            }
-
-            if (node == null) {
-
-                if (defaultNode != null) {
-                    nodesByDataMapName.put(map.getName(), defaultNode);
-                    node = defaultNode;
-                } else {
-                    throw new CayenneRuntimeException("No DataNode configured for DataMap '" + map.getName()
-                            + "' and no default DataNode set");
-                }
-            }
-        }
-
-        return node;
-    }
-
-    /**
-     * Sets EntityResolver. If not set explicitly, DataDomain creates a default
-     * EntityResolver internally on demand.
-     * 
-     * @since 1.1
-     */
-    public void setEntityResolver(EntityResolver entityResolver) {
-        this.entityResolver = entityResolver;
-    }
-
-    // creates default entity resolver if there is none set yet
-    private synchronized void createEntityResolver() {
-        if (entityResolver == null) {
-            // entity resolver will be self-indexing as we add all our maps
-            // to it as they are added to the DataDomain
-            entityResolver = new EntityResolver();
-        }
-    }
-
-    /**
-     * Shuts down all owned data nodes and marks this domain as stopped.
-     */
-    @BeforeScopeEnd
-    public void shutdown() {
-        if (!stopped) {
-            stopped = true;
-
-            if (sharedSnapshotCache != null) {
-                sharedSnapshotCache.shutdown();
-            }
-        }
-    }
-
-    /**
-     * Routes queries to appropriate DataNodes for execution.
-     */
-    public void performQueries(final Collection<? extends Query> queries, final OperationObserver callback) {
-
-        transactionManager.performInTransaction(new TransactionalOperation<Object>() {
-            @Override
-            public Object perform() {
-                new DataDomainLegacyQueryAction(DataDomain.this, new QueryChain(queries), callback).execute();
-                return null;
-            }
-        });
-    }
-
-    // ****** DataChannel methods:
-
-    /**
-     * Runs query returning generic QueryResponse.
-     * 
-     * @since 1.2
-     */
-    @Override
-    public QueryResponse onQuery(final ObjectContext originatingContext, final Query query) {
-        checkStopped();
-
-        return new DataDomainQueryFilterChain().onQuery(originatingContext, query);
-    }
-
-    QueryResponse onQueryNoFilters(final ObjectContext originatingContext, final Query query) {
-        // transaction note:
-        // we don't wrap this code in transaction to reduce transaction scope to
-        // just the DB operation for better performance ... query action will
-        // start a transaction itself when and if needed
-        return new DataDomainQueryAction(originatingContext, DataDomain.this, query).execute();
-    }
-
-    /**
-     * Returns an EntityResolver that stores mapping information for this
-     * domain.
-     */
-    @Override
-    public EntityResolver getEntityResolver() {
-        if (entityResolver == null) {
-            createEntityResolver();
-        }
-
-        return entityResolver;
-    }
-
-    /**
-     * Only handles commit-type synchronization, ignoring any other type.
-     * 
-     * @since 1.2
-     */
-    @Override
-    public GraphDiff onSync(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
-
-        checkStopped();
-
-        return new DataDomainSyncFilterChain().onSync(originatingContext, changes, syncType);
-    }
-
-    GraphDiff onSyncNoFilters(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
-        DataChannelSyncCallbackAction callbackAction = DataChannelSyncCallbackAction.getCallbackAction(
-                getEntityResolver().getCallbackRegistry(), originatingContext.getGraphManager(), changes, syncType);
-
-        callbackAction.applyPreCommit();
-
-        GraphDiff result;
-        switch (syncType) {
-        case DataChannel.ROLLBACK_CASCADE_SYNC:
-            result = onSyncRollback(originatingContext);
-            break;
-        // "cascade" and "no_cascade" are the same from the DataDomain
-        // perspective,
-        // including transaction handling logic
-        case DataChannel.FLUSH_NOCASCADE_SYNC:
-        case DataChannel.FLUSH_CASCADE_SYNC:
-            result = transactionManager.performInTransaction(new TransactionalOperation<GraphDiff>() {
-                @Override
-                public GraphDiff perform() {
-                    return onSyncFlush(originatingContext, changes);
-                }
-            });
-
-            break;
-        default:
-            throw new CayenneRuntimeException("Invalid synchronization type: " + syncType);
-        }
-
-        callbackAction.applyPostCommit();
-        return result;
-    }
-
-    GraphDiff onSyncRollback(ObjectContext originatingContext) {
-        // if there is a transaction in progress, roll it back
-
-        Transaction transaction = BaseTransaction.getThreadTransaction();
-        if (transaction != null) {
-            transaction.setRollbackOnly();
-        }
-
-        return new CompoundDiff();
-    }
-
-    GraphDiff onSyncFlush(ObjectContext originatingContext, GraphDiff childChanges) {
-
-        if (!(originatingContext instanceof DataContext)) {
-            throw new CayenneRuntimeException(
-                    "No support for committing ObjectContexts that are not DataContexts yet. "
-                            + "Unsupported context: " + originatingContext);
-        }
-
-        DataDomainFlushAction action = new DataDomainFlushAction(this);
-        action.setJdbcEventLogger(jdbcEventLogger);
-
-        return action.flush((DataContext) originatingContext, childChanges);
-    }
-
-    @Override
-    public String toString() {
-        return new ToStringBuilder(this).append("name", name).toString();
-    }
-
-    /**
-     * Returns shared {@link QueryCache} used by this DataDomain.
-     * 
-     * @since 3.0
-     */
-    public QueryCache getQueryCache() {
-        return queryCache;
-    }
-
-    public void setQueryCache(QueryCache queryCache) {
-        this.queryCache = queryCache;
-    }
-
-    /**
-     * @since 3.1
-     */
-    JdbcEventLogger getJdbcEventLogger() {
-        return jdbcEventLogger;
-    }
-
-    void refreshEntitySorter() {
-        if (entitySorter != null) {
-            entitySorter.setEntityResolver(getEntityResolver());
-        }
-    }
-
-    /**
-     * Returns an unmodifiable list of filters registered with this DataDomain.
-     * <p>
-     * Filter ordering note: filters are applied in reverse order of their
-     * occurrence in the filter list. I.e. the last filter in the list is called
-     * first in the chain.
-     * 
-     * @since 3.1
-     */
-    public List<DataChannelFilter> getFilters() {
-        return Collections.unmodifiableList(filters);
-    }
-
-    /**
-     * Adds a new filter, immediately calling its 'init' method. Since 4.0 this
-     * method also registers passed filter as an event listener, if any of its
-     * methods have event annotations.
-     * 
-     * @since 3.1
-     */
-    public void addFilter(DataChannelFilter filter) {
-        filter.init(this);
-        getEntityResolver().getCallbackRegistry().addListener(filter);
-        filters.add(filter);
-    }
-
-    /**
-     * Removes a filter from the filter chain.
-     * 
-     * @since 3.1
-     */
-    public void removeFilter(DataChannelFilter filter) {
-        filters.remove(filter);
-    }
-
-    /**
-     * Adds a listener, mapping its methods to events based on annotations. This
-     * is a shortcut for
-     * 'getEntityResolver().getCallbackRegistry().addListener(listener)'.
-     * 
-     * @since 4.0
-     */
-    public void addListener(Object listener) {
-        getEntityResolver().getCallbackRegistry().addListener(listener);
-    }
-
-    abstract class DataDomainFilterChain implements DataChannelFilterChain {
-
-        private int i;
-
-        DataDomainFilterChain() {
-            i = filters != null ? filters.size() : 0;
-        }
-
-        DataChannelFilter nextFilter() {
-            // filters are ordered innermost to outermost
-            i--;
-            return i >= 0 ? filters.get(i) : null;
-        }
-    }
-
-    final class DataDomainQueryFilterChain extends DataDomainFilterChain {
-
-        @Override
-        public QueryResponse onQuery(ObjectContext originatingContext, Query query) {
-
-            DataChannelFilter filter = nextFilter();
-            return (filter != null) ? filter.onQuery(originatingContext, query, this) : onQueryNoFilters(
-                    originatingContext, query);
-        }
-
-        @Override
-        public GraphDiff onSync(ObjectContext originatingContext, GraphDiff changes, int syncType) {
-            throw new UnsupportedOperationException("It is illegal to call 'onSync' inside 'onQuery' chain");
-        }
-    }
-
-    final class DataDomainSyncFilterChain extends DataDomainFilterChain {
-
-        @Override
-        public GraphDiff onSync(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
-
-            DataChannelFilter filter = nextFilter();
-            return (filter != null) ? filter.onSync(originatingContext, changes, syncType, this) : onSyncNoFilters(
-                    originatingContext, changes, syncType);
-        }
-
-        @Override
-        public QueryResponse onQuery(ObjectContext originatingContext, Query query) {
-            throw new UnsupportedOperationException("It is illegal to call 'onQuery' inside 'onSync' chain");
-        }
-    }
-
-    /**
-     * An optional DataNode that is used for DataMaps that are not linked to a
-     * DataNode explicitly.
-     * 
-     * @since 3.1
-     */
-    public DataNode getDefaultNode() {
-        return defaultNode;
-    }
-
-    /**
-     * @since 3.1
-     */
-    public void setDefaultNode(DataNode defaultNode) {
-        this.defaultNode = defaultNode;
-    }
-
-    /**
-     * Returns a maximum number of object IDs to match in a single query for
-     * queries that select objects based on collection of ObjectIds. This
-     * affects queries generated by Cayenne when processing paginated queries
-     * and DISJOINT_BY_ID prefetches and is intended to address database
-     * limitations on the size of SQL statements as well as to cap memory use in
-     * Cayenne when generating such queries. The default is 10000. It can be
-     * changed either by calling {@link #setMaxIdQualifierSize(int)} or changing
-     * the value for property
-     * {@link Constants#SERVER_MAX_ID_QUALIFIER_SIZE_PROPERTY}.
-     * 
-     * @since 3.1
-     */
-    public int getMaxIdQualifierSize() {
-        return maxIdQualifierSize;
-    }
-
-    /**
-     * @since 3.1
-     */
-    public void setMaxIdQualifierSize(int maxIdQualifierSize) {
-        this.maxIdQualifierSize = maxIdQualifierSize;
-    }
-
-    TransactionManager getTransactionManager() {
-        return transactionManager;
-    }
+	public static final String SHARED_CACHE_ENABLED_PROPERTY = "cayenne.DataDomain.sharedCache";
+	public static final boolean SHARED_CACHE_ENABLED_DEFAULT = true;
+
+	public static final String VALIDATING_OBJECTS_ON_COMMIT_PROPERTY = "cayenne.DataDomain.validatingObjectsOnCommit";
+	public static final boolean VALIDATING_OBJECTS_ON_COMMIT_DEFAULT = true;
+
+	/**
+	 * @deprecated since 4.0 See {@link Constants#SERVER_EXTERNAL_TX_PROPERTY}.
+	 */
+	@Deprecated
+	public static final String USING_EXTERNAL_TRANSACTIONS_PROPERTY = "cayenne.DataDomain.usingExternalTransactions";
+
+	/**
+	 * @deprecated since 4.0 See {@link Constants#SERVER_EXTERNAL_TX_PROPERTY}.
+	 */
+	@Deprecated
+	public static final boolean USING_EXTERNAL_TRANSACTIONS_DEFAULT = false;
+
+	/**
+	 * @since 3.1
+	 */
+	@Inject
+	protected JdbcEventLogger jdbcEventLogger;
+
+	/**
+	 * @since 4.0
+	 */
+	@Inject
+	protected TransactionManager transactionManager;
+
+	/**
+	 * @since 3.1
+	 */
+	protected int maxIdQualifierSize;
+
+	/**
+	 * @since 3.1
+	 */
+	protected List<DataChannelFilter> filters;
+
+	protected Map<String, DataNode> nodes;
+	protected Map<String, DataNode> nodesByDataMapName;
+	protected DataNode defaultNode;
+	protected Map<String, String> properties;
+
+	protected EntityResolver entityResolver;
+	protected DataRowStore sharedSnapshotCache;
+	protected String name;
+	protected QueryCache queryCache;
+
+	// these are initialized from properties...
+	protected boolean sharedCacheEnabled;
+	protected boolean validatingObjectsOnCommit;
+
+	/**
+	 * @since 1.2
+	 */
+	protected EventManager eventManager;
+
+	/**
+	 * @since 1.2
+	 */
+	protected EntitySorter entitySorter;
+
+	protected boolean stopped;
+
+	/**
+	 * Creates a DataDomain and assigns it a name.
+	 */
+	public DataDomain(String name) {
+		init(name);
+		resetProperties();
+	}
+
+	/**
+	 * Creates new DataDomain.
+	 * 
+	 * @param name
+	 *            DataDomain name. Domain can be located using its name in the
+	 *            Configuration object.
+	 * @param properties
+	 *            A Map containing domain configuration properties.
+	 * @deprecated since 4.0 unused
+	 */
+	@Deprecated
+	public DataDomain(String name, Map properties) {
+		init(name);
+		initWithProperties(properties);
+	}
+
+	private void init(String name) {
+
+		this.filters = new CopyOnWriteArrayList<DataChannelFilter>();
+		this.nodesByDataMapName = new ConcurrentHashMap<>();
+		this.nodes = new ConcurrentHashMap<>();
+
+		// properties are read-only, so no need for concurrent map, or any
+		// specific map
+		// for that matter
+		this.properties = Collections.EMPTY_MAP;
+
+		setName(name);
+	}
+
+	/**
+	 * Checks that Domain is not stopped. Throws DomainStoppedException
+	 * otherwise.
+	 * 
+	 * @since 3.0
+	 */
+	protected void checkStopped() throws DomainStoppedException {
+		if (stopped) {
+			throw new DomainStoppedException("Domain " + name
+					+ " was shutdown and can no longer be used to access the database");
+		}
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	public EntitySorter getEntitySorter() {
+		return entitySorter;
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	public void setEntitySorter(EntitySorter entitySorter) {
+		this.entitySorter = entitySorter;
+	}
+
+	/**
+	 * @since 1.1
+	 */
+	protected void resetProperties() {
+		properties = Collections.EMPTY_MAP;
+
+		sharedCacheEnabled = SHARED_CACHE_ENABLED_DEFAULT;
+		validatingObjectsOnCommit = VALIDATING_OBJECTS_ON_COMMIT_DEFAULT;
+	}
+
+	/**
+	 * Reinitializes domain state with a new set of properties.
+	 * 
+	 * @since 1.1
+	 * @deprecated since 4.0 properties are processed by the DI provider.
+	 */
+	@Deprecated
+	public void initWithProperties(Map<String, String> properties) {
+
+		// clone properties to ensure that it is read-only internally
+		properties = properties != null ? new HashMap<>(properties) : Collections.EMPTY_MAP;
+
+		String sharedCacheEnabled = properties.get(SHARED_CACHE_ENABLED_PROPERTY);
+		String validatingObjectsOnCommit = properties.get(VALIDATING_OBJECTS_ON_COMMIT_PROPERTY);
+
+		// init ivars from properties
+		this.sharedCacheEnabled = (sharedCacheEnabled != null) ? "true".equalsIgnoreCase(sharedCacheEnabled)
+				: SHARED_CACHE_ENABLED_DEFAULT;
+		this.validatingObjectsOnCommit = (validatingObjectsOnCommit != null) ? "true"
+				.equalsIgnoreCase(validatingObjectsOnCommit) : VALIDATING_OBJECTS_ON_COMMIT_DEFAULT;
+
+		this.properties = properties;
+	}
+
+	/**
+	 * Returns EventManager used by this DataDomain.
+	 * 
+	 * @since 1.2
+	 */
+	public EventManager getEventManager() {
+		return eventManager;
+	}
+
+	/**
+	 * Sets EventManager used by this DataDomain.
+	 * 
+	 * @since 1.2
+	 */
+	public void setEventManager(EventManager eventManager) {
+		this.eventManager = eventManager;
+
+		if (sharedSnapshotCache != null) {
+			sharedSnapshotCache.setEventManager(eventManager);
+		}
+	}
+
+	/**
+	 * Returns "name" property value.
+	 */
+	public String getName() {
+		return name;
+	}
+
+	/**
+	 * Sets "name" property to a new value.
+	 */
+	public synchronized void setName(String name) {
+		this.name = name;
+		if (sharedSnapshotCache != null) {
+			this.sharedSnapshotCache.setName(name);
+		}
+	}
+
+	/**
+	 * Returns <code>true</code> if DataContexts produced by this DataDomain are
+	 * using shared DataRowStore. Returns <code>false</code> if each DataContext
+	 * would work with its own DataRowStore. Note that this setting can be
+	 * overwritten per DataContext.
+	 */
+	public boolean isSharedCacheEnabled() {
+		return sharedCacheEnabled;
+	}
+
+	public void setSharedCacheEnabled(boolean sharedCacheEnabled) {
+		this.sharedCacheEnabled = sharedCacheEnabled;
+	}
+
+	/**
+	 * Returns whether child DataContexts' default behavior is to perform object
+	 * validation before commit is executed.
+	 * 
+	 * @since 1.1
+	 */
+	public boolean isValidatingObjectsOnCommit() {
+		return validatingObjectsOnCommit;
+	}
+
+	/**
+	 * Sets the property defining whether child DataContexts should perform
+	 * object validation before commit is executed.
+	 * 
+	 * @since 1.1
+	 */
+	public void setValidatingObjectsOnCommit(boolean flag) {
+		this.validatingObjectsOnCommit = flag;
+	}
+
+	/**
+	 * @since 1.1
+	 * @return a Map of properties for this DataDomain.
+	 */
+	public Map<String, String> getProperties() {
+		return properties;
+	}
+
+	/**
+	 * Returns snapshots cache for this DataDomain, lazily initializing it on
+	 * the first call if 'sharedCacheEnabled' flag is true.
+	 */
+	public DataRowStore getSharedSnapshotCache() {
+		if (sharedSnapshotCache == null && sharedCacheEnabled) {
+			this.sharedSnapshotCache = nonNullSharedSnapshotCache();
+		}
+
+		return sharedSnapshotCache;
+	}
+
+	/**
+	 * Returns a guaranteed non-null shared snapshot cache regardless of the
+	 * 'sharedCacheEnabled' flag setting.
+	 */
+	synchronized DataRowStore nonNullSharedSnapshotCache() {
+		if (sharedSnapshotCache == null) {
+			this.sharedSnapshotCache = new DataRowStore(name, properties, eventManager);
+		}
+
+		return sharedSnapshotCache;
+	}
+
+	/**
+	 * Shuts down the previous cache instance, sets cache to the new
+	 * DataRowStore instance and updates two properties of the new DataRowStore:
+	 * name and eventManager.
+	 */
+	public synchronized void setSharedSnapshotCache(DataRowStore snapshotCache) {
+		if (this.sharedSnapshotCache != snapshotCache) {
+			if (this.sharedSnapshotCache != null) {
+				this.sharedSnapshotCache.shutdown();
+			}
+			this.sharedSnapshotCache = snapshotCache;
+
+			if (snapshotCache != null) {
+				snapshotCache.setEventManager(getEventManager());
+				snapshotCache.setName(getName());
+			}
+		}
+	}
+
+	public void addDataMap(DataMap dataMap) {
+		getEntityResolver().addDataMap(dataMap);
+		refreshEntitySorter();
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	public DataMap getDataMap(String mapName) {
+		return getEntityResolver().getDataMap(mapName);
+	}
+
+	/**
+	 * Removes named DataMap from this DataDomain and any underlying DataNodes
+	 * that include it.
+	 * 
+	 * @since 3.1
+	 */
+	public void removeDataMap(String mapName) {
+		DataMap map = getDataMap(mapName);
+		if (map == null) {
+			return;
+		}
+
+		// remove from data nodes
+		for (DataNode node : nodes.values()) {
+			node.removeDataMap(mapName);
+		}
+
+		nodesByDataMapName.remove(mapName);
+
+		// remove from EntityResolver
+		getEntityResolver().removeDataMap(map);
+
+		refreshEntitySorter();
+	}
+
+	/**
+	 * Removes a DataNode from DataDomain. Any maps previously associated with
+	 * this node within the domain will still be kept around, however they won't be
+	 * mapped to any node.
+	 */
+	public void removeDataNode(String nodeName) {
+		DataNode removed = nodes.remove(nodeName);
+		if (removed != null) {
+
+			removed.setEntityResolver(null);
+
+			Iterator<DataNode> it = nodesByDataMapName.values().iterator();
+			while (it.hasNext()) {
+				if (it.next() == removed) {
+					it.remove();
+				}
+			}
+		}
+	}
+
+	/**
+	 * Returns a collection of registered DataMaps.
+	 */
+	public Collection<DataMap> getDataMaps() {
+		return getEntityResolver().getDataMaps();
+	}
+
+	/**
+	 * Returns an unmodifiable collection of DataNodes associated with this
+	 * domain.
+	 */
+	public Collection<DataNode> getDataNodes() {
+		return Collections.unmodifiableCollection(nodes.values());
+	}
+
+	/**
+	 * Adds new DataNode.
+	 */
+	public void addNode(DataNode node) {
+
+		// add node to name->node map
+		nodes.put(node.getName(), node);
+		node.setEntityResolver(getEntityResolver());
+
+		// add node to "ent name->node" map
+		for (DataMap map : node.getDataMaps()) {
+			addDataMap(map);
+			nodesByDataMapName.put(map.getName(), node);
+		}
+	}
+
+	/**
+	 * Returns registered DataNode whose name matches <code>name</code>
+	 * parameter.
+	 * 
+	 * @since 3.1
+	 */
+	public DataNode getDataNode(String nodeName) {
+		return nodes.get(nodeName);
+	}
+
+	/**
+	 * Returns a DataNode that should handle queries for all entities in a
+	 * DataMap.
+	 * 
+	 * @since 1.1
+	 */
+	public DataNode lookupDataNode(DataMap map) {
+
+		DataNode node = nodesByDataMapName.get(map.getName());
+		if (node == null) {
+
+			// see if one of the node states has changed, and the map is now
+			// linked...
+			for (DataNode n : getDataNodes()) {
+				for (DataMap m : n.getDataMaps()) {
+					if (m == map) {
+						nodesByDataMapName.put(map.getName(), n);
+						node = n;
+						break;
+					}
+				}
+
+				if (node != null) {
+					break;
+				}
+			}
+
+			if (node == null) {
+
+				if (defaultNode != null) {
+					nodesByDataMapName.put(map.getName(), defaultNode);
+					node = defaultNode;
+				} else {
+					throw new CayenneRuntimeException("No DataNode configured for DataMap '" + map.getName()
+							+ "' and no default DataNode set");
+				}
+			}
+		}
+
+		return node;
+	}
+
+	/**
+	 * Sets EntityResolver. If not set explicitly, DataDomain creates a default
+	 * EntityResolver internally on demand.
+	 * 
+	 * @since 1.1
+	 */
+	public void setEntityResolver(EntityResolver entityResolver) {
+		this.entityResolver = entityResolver;
+	}
+
+	// creates default entity resolver if there is none set yet
+	private synchronized void createEntityResolver() {
+		if (entityResolver == null) {
+			// entity resolver will be self-indexing as we add all our maps
+			// to it as they are added to the DataDomain
+			entityResolver = new EntityResolver();
+		}
+	}
+
+	/**
+	 * Shuts down all owned data nodes and marks this domain as stopped.
+	 */
+	@BeforeScopeEnd
+	public void shutdown() {
+		if (!stopped) {
+			stopped = true;
+
+			if (sharedSnapshotCache != null) {
+				sharedSnapshotCache.shutdown();
+			}
+		}
+	}
+
+	/**
+	 * Routes queries to appropriate DataNodes for execution.
+	 */
+	public void performQueries(final Collection<? extends Query> queries, final OperationObserver callback) {
+
+		transactionManager.performInTransaction(new TransactionalOperation<Object>() {
+			@Override
+			public Object perform() {
+				new DataDomainLegacyQueryAction(DataDomain.this, new QueryChain(queries), callback).execute();
+				return null;
+			}
+		});
+	}
+
+	// ****** DataChannel methods:
+
+	/**
+	 * Runs query returning generic QueryResponse.
+	 * 
+	 * @since 1.2
+	 */
+	@Override
+	public QueryResponse onQuery(final ObjectContext originatingContext, final Query query) {
+		checkStopped();
+
+		return new DataDomainQueryFilterChain().onQuery(originatingContext, query);
+	}
+
+	QueryResponse onQueryNoFilters(final ObjectContext originatingContext, final Query query) {
+		// transaction note:
+		// we don't wrap this code in transaction to reduce transaction scope to
+		// just the DB operation for better performance ... query action will
+		// start a transaction itself when and if needed
+		return new DataDomainQueryAction(originatingContext, DataDomain.this, query).execute();
+	}
+
+	/**
+	 * Returns an EntityResolver that stores mapping information for this
+	 * domain.
+	 */
+	@Override
+	public EntityResolver getEntityResolver() {
+		if (entityResolver == null) {
+			createEntityResolver();
+		}
+
+		return entityResolver;
+	}
+
+	/**
+	 * Only handles commit-type synchronization, ignoring any other type.
+	 * 
+	 * @since 1.2
+	 */
+	@Override
+	public GraphDiff onSync(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
+
+		checkStopped();
+
+		return new DataDomainSyncFilterChain().onSync(originatingContext, changes, syncType);
+	}
+
+	GraphDiff onSyncNoFilters(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
+		DataChannelSyncCallbackAction callbackAction = DataChannelSyncCallbackAction.getCallbackAction(
+				getEntityResolver().getCallbackRegistry(), originatingContext.getGraphManager(), changes, syncType);
+
+		callbackAction.applyPreCommit();
+
+		GraphDiff result;
+		switch (syncType) {
+		case DataChannel.ROLLBACK_CASCADE_SYNC:
+			result = onSyncRollback(originatingContext);
+			break;
+		// "cascade" and "no_cascade" are the same from the DataDomain
+		// perspective,
+		// including transaction handling logic
+		case DataChannel.FLUSH_NOCASCADE_SYNC:
+		case DataChannel.FLUSH_CASCADE_SYNC:
+			result = transactionManager.performInTransaction(new TransactionalOperation<GraphDiff>() {
+				@Override
+				public GraphDiff perform() {
+					return onSyncFlush(originatingContext, changes);
+				}
+			});
+
+			break;
+		default:
+			throw new CayenneRuntimeException("Invalid synchronization type: " + syncType);
+		}
+
+		callbackAction.applyPostCommit();
+		return result;
+	}
+
+	GraphDiff onSyncRollback(ObjectContext originatingContext) {
+		// if there is a transaction in progress, roll it back
+
+		Transaction transaction = BaseTransaction.getThreadTransaction();
+		if (transaction != null) {
+			transaction.setRollbackOnly();
+		}
+
+		return new CompoundDiff();
+	}
+
+	GraphDiff onSyncFlush(ObjectContext originatingContext, GraphDiff childChanges) {
+
+		if (!(originatingContext instanceof DataContext)) {
+			throw new CayenneRuntimeException(
+					"No support for committing ObjectContexts that are not DataContexts yet. "
+							+ "Unsupported context: " + originatingContext);
+		}
+
+		DataDomainFlushAction action = new DataDomainFlushAction(this);
+		action.setJdbcEventLogger(jdbcEventLogger);
+
+		return action.flush((DataContext) originatingContext, childChanges);
+	}
+
+	@Override
+	public String toString() {
+		return new ToStringBuilder(this).append("name", name).toString();
+	}
+
+	/**
+	 * Returns shared {@link QueryCache} used by this DataDomain.
+	 * 
+	 * @since 3.0
+	 */
+	public QueryCache getQueryCache() {
+		return queryCache;
+	}
+
+	public void setQueryCache(QueryCache queryCache) {
+		this.queryCache = queryCache;
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	JdbcEventLogger getJdbcEventLogger() {
+		return jdbcEventLogger;
+	}
+
+	void refreshEntitySorter() {
+		if (entitySorter != null) {
+			entitySorter.setEntityResolver(getEntityResolver());
+		}
+	}
+
+	/**
+	 * Returns an unmodifiable list of filters registered with this DataDomain.
+	 * <p>
+	 * Filter ordering note: filters are applied in reverse order of their
+	 * occurrence in the filter list. I.e. the last filter in the list is called
+	 * first in the chain.
+	 * 
+	 * @since 3.1
+	 */
+	public List<DataChannelFilter> getFilters() {
+		return Collections.unmodifiableList(filters);
+	}
+
+	/**
+	 * Adds a new filter, immediately calling its 'init' method. Since 4.0 this
+	 * method also registers passed filter as an event listener, if any of its
+	 * methods have event annotations.
+	 * 
+	 * @since 3.1
+	 */
+	public void addFilter(DataChannelFilter filter) {
+		filter.init(this);
+		getEntityResolver().getCallbackRegistry().addListener(filter);
+		filters.add(filter);
+	}
+
+	/**
+	 * Removes a filter from the filter chain.
+	 * 
+	 * @since 3.1
+	 */
+	public void removeFilter(DataChannelFilter filter) {
+		filters.remove(filter);
+	}
+
+	/**
+	 * Adds a listener, mapping its methods to events based on annotations. This
+	 * is a shortcut for
+	 * 'getEntityResolver().getCallbackRegistry().addListener(listener)'.
+	 * 
+	 * @since 4.0
+	 */
+	public void addListener(Object listener) {
+		getEntityResolver().getCallbackRegistry().addListener(listener);
+	}
+
+	abstract class DataDomainFilterChain implements DataChannelFilterChain {
+
+		private int i;
+
+		DataDomainFilterChain() {
+			i = filters != null ? filters.size() : 0;
+		}
+
+		DataChannelFilter nextFilter() {
+			// filters are ordered innermost to outermost
+			i--;
+			return i >= 0 ? filters.get(i) : null;
+		}
+	}
+
+	final class DataDomainQueryFilterChain extends DataDomainFilterChain {
+
+		@Override
+		public QueryResponse onQuery(ObjectContext originatingContext, Query query) {
+
+			DataChannelFilter filter = nextFilter();
+			return (filter != null) ? filter.onQuery(originatingContext, query, this) : onQueryNoFilters(
+					originatingContext, query);
+		}
+
+		@Override
+		public GraphDiff onSync(ObjectContext originatingContext, GraphDiff changes, int syncType) {
+			throw new UnsupportedOperationException("It is illegal to call 'onSync' inside 'onQuery' chain");
+		}
+	}
+
+	final class DataDomainSyncFilterChain extends DataDomainFilterChain {
+
+		@Override
+		public GraphDiff onSync(final ObjectContext originatingContext, final GraphDiff changes, int syncType) {
+
+			DataChannelFilter filter = nextFilter();
+			return (filter != null) ? filter.onSync(originatingContext, changes, syncType, this) : onSyncNoFilters(
+					originatingContext, changes, syncType);
+		}
+
+		@Override
+		public QueryResponse onQuery(ObjectContext originatingContext, Query query) {
+			throw new UnsupportedOperationException("It is illegal to call 'onQuery' inside 'onSync' chain");
+		}
+	}
+
+	/**
+	 * An optional DataNode that is used for DataMaps that are not linked to a
+	 * DataNode explicitly.
+	 * 
+	 * @since 3.1
+	 */
+	public DataNode getDefaultNode() {
+		return defaultNode;
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	public void setDefaultNode(DataNode defaultNode) {
+		this.defaultNode = defaultNode;
+	}
+
+	/**
+	 * Returns a maximum number of object IDs to match in a single query for
+	 * queries that select objects based on collection of ObjectIds. This
+	 * affects queries generated by Cayenne when processing paginated queries
+	 * and DISJOINT_BY_ID prefetches and is intended to address database
+	 * limitations on the size of SQL statements as well as to cap memory use in
+	 * Cayenne when generating such queries. The default is 10000. It can be
+	 * changed either by calling {@link #setMaxIdQualifierSize(int)} or changing
+	 * the value for property
+	 * {@link Constants#SERVER_MAX_ID_QUALIFIER_SIZE_PROPERTY}.
+	 * 
+	 * @since 3.1
+	 */
+	public int getMaxIdQualifierSize() {
+		return maxIdQualifierSize;
+	}
+
+	/**
+	 * @since 3.1
+	 */
+	public void setMaxIdQualifierSize(int maxIdQualifierSize) {
+		this.maxIdQualifierSize = maxIdQualifierSize;
+	}
+
+	TransactionManager getTransactionManager() {
+		return transactionManager;
+	}
 }
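
A note on the filter ordering documented above: getFilters() returns the filters in registration order, while DataDomainFilterChain.nextFilter() walks that list from the end, so the filter added last is invoked first. A minimal sketch of that traversal, using an illustrative FilterOrderSketch class that is not part of Cayenne:

import java.util.Arrays;
import java.util.List;

public class FilterOrderSketch {

    public static void main(String[] args) {
        // registration order, as if addFilter() had been called with A, then B, then C
        List<String> filters = Arrays.asList("A", "B", "C");

        // nextFilter() starts its index at filters.size() and decrements,
        // so the chain invokes C first and A last
        for (int i = filters.size() - 1; i >= 0; i--) {
            System.out.println("invoking filter " + filters.get(i));
        }
    }
}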

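The remaining hunks below largely repeat one mechanical Java 7 change: wherever the type arguments of a generic constructor can be inferred from the declaration, the explicit arguments are replaced with the diamond operator. A minimal before/after sketch of the pattern, using a stand-in DataNode type rather than Cayenne's own:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DiamondSketch {

    // stand-in for org.apache.cayenne.access.DataNode, for illustration only
    static class DataNode {
    }

    // pre-Java 7: type arguments repeated on the constructor call
    Map<String, DataNode> before = new ConcurrentHashMap<String, DataNode>();

    // Java 7: the compiler infers <String, DataNode> from the declared type
    Map<String, DataNode> after = new ConcurrentHashMap<>();
}
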
http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainDBDiffBuilder.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainDBDiffBuilder.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainDBDiffBuilder.java
index 3c24898..c278a78 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainDBDiffBuilder.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainDBDiffBuilder.java
@@ -82,7 +82,7 @@ class DataDomainDBDiffBuilder implements GraphChangeHandler {
             return null;
         }
 
-        Map<String, Object> dbDiff = new HashMap<String, Object>();
+        Map<String, Object> dbDiff = new HashMap<>();
 
         appendSimpleProperties(dbDiff);
         appendForeignKeys(dbDiff);

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainInsertBucket.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainInsertBucket.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainInsertBucket.java
index 2fe35ef..dbe9a70 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainInsertBucket.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DataDomainInsertBucket.java
@@ -79,7 +79,7 @@ class DataDomainInsertBucket extends DataDomainSyncBucket {
                     // we need to insert even if there is no changes to default
                     // values so creating an empty changes map
                     if (snapshot == null) {
-                        snapshot = new HashMap<String, Object>();
+                        snapshot = new HashMap<>();
                     }
 
                     batch.add(snapshot, o.getObjectId());

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DataNode.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DataNode.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DataNode.java
index 84c72f5..66651be 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DataNode.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DataNode.java
@@ -96,7 +96,7 @@ public class DataNode implements QueryEngine {
 	public DataNode(String name) {
 
 		this.name = name;
-		this.dataMaps = new HashMap<String, DataMap>();
+		this.dataMaps = new HashMap<>();
 		this.readThroughDataSource = new TransactionDataSource();
 
 		// make sure logger is not null

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DataNodeSyncQualifierDescriptor.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DataNodeSyncQualifierDescriptor.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DataNodeSyncQualifierDescriptor.java
index 4d037ac..8ec6705 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DataNodeSyncQualifierDescriptor.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DataNodeSyncQualifierDescriptor.java
@@ -41,147 +41,140 @@ import org.apache.commons.collections.Transformer;
  */
 class DataNodeSyncQualifierDescriptor {
 
-    private List<DbAttribute> attributes;
-    private List<Transformer> valueTransformers;
-    private boolean usingOptimisticLocking;
-
-    public boolean isUsingOptimisticLocking() {
-        return usingOptimisticLocking;
-    }
-
-    List<DbAttribute> getAttributes() {
-        return attributes;
-    }
-
-    Map<String, Object> createQualifierSnapshot(ObjectDiff diff) {
-        int len = attributes.size();
-
-        Map<String, Object> map = new HashMap<String, Object>(len * 2);
-        for (int i = 0; i < len; i++) {
-            DbAttribute attribute = attributes.get(i);
-            if (!map.containsKey(attribute.getName())) {
-
-                Object value = valueTransformers.get(i).transform(diff);
-                map.put(attribute.getName(), value);
-            }
-        }
-
-        return map;
-    }
-
-    void reset(DbEntityClassDescriptor descriptor) {
-
-        attributes = new ArrayList<DbAttribute>(3);
-        valueTransformers = new ArrayList<Transformer>(3);
-        usingOptimisticLocking = descriptor.getEntity().getLockType() == ObjEntity.LOCK_TYPE_OPTIMISTIC;
-
-        // master PK columns
-        if (descriptor.isMaster()) {
-            for (final DbAttribute attribute : descriptor.getDbEntity().getPrimaryKeys()) {
-                attributes.add(attribute);
-                valueTransformers.add(new Transformer() {
-
-                    public Object transform(Object input) {
-                        ObjectId id = (ObjectId) ((ObjectDiff) input).getNodeId();
-                        return id.getIdSnapshot().get(attribute.getName());
-                    }
-                });
-            }
-        }
-        else {
-
-            // TODO: andrus 12/23/2007 - only one step relationship is supported...
-            if (descriptor.getPathFromMaster().size() != 1) {
-                throw new CayenneRuntimeException(
-                        "Only single step dependent relationships are currently supported. Actual path length: "
-                                + descriptor.getPathFromMaster().size());
-            }
-            
-            DbRelationship masterDependentDbRel = descriptor.getPathFromMaster().get(0);
-
-            if (masterDependentDbRel != null) {
-                for (final DbJoin dbAttrPair : masterDependentDbRel.getJoins()) {
-                    DbAttribute dbAttribute = dbAttrPair.getTarget();
-                    if (!attributes.contains(dbAttribute)) {
-
-                        attributes.add(dbAttribute);
-                        valueTransformers.add(new Transformer() {
-
-                            public Object transform(Object input) {
-                                ObjectId id = (ObjectId) ((ObjectDiff) input).getNodeId();
-                                return id.getIdSnapshot().get(dbAttrPair.getSourceName());
-                            }
-                        });
-                    }
-                }
-            }
-        }
-
-        if (usingOptimisticLocking) {
-
-            for (final ObjAttribute attribute : descriptor.getEntity().getAttributes()) {
-
-                if (attribute.isUsedForLocking()) {
-                    // only care about first step in a flattened attribute
-                    DbAttribute dbAttribute = (DbAttribute) attribute
-                            .getDbPathIterator()
-                            .next();
-
-                    if (!attributes.contains(dbAttribute)) {
-                        attributes.add(dbAttribute);
-
-                        valueTransformers.add(new Transformer() {
-
-                            public Object transform(Object input) {
-                                return ((ObjectDiff) input).getSnapshotValue(attribute
-                                        .getName());
-                            }
-                        });
-                    }
-                }
-            }
-
-            for (final ObjRelationship relationship : descriptor
-                    .getEntity()
-                    .getRelationships()) {
-
-                if (relationship.isUsedForLocking()) {
-                    // only care about the first DbRelationship
-                    DbRelationship dbRelationship = relationship
-                            .getDbRelationships()
-                            .get(0);
-
-                    for (final DbJoin dbAttrPair : dbRelationship.getJoins()) {
-                        DbAttribute dbAttribute = dbAttrPair.getSource();
-
-                        // relationship transformers override attribute transformers for
-                        // meaningful FK's... why meaningful FKs can go out of sync is
-                        // another story (CAY-595)
-                        int index = attributes.indexOf(dbAttribute);
-                        if (index >= 0 && !dbAttribute.isForeignKey()) {
-                            continue;
-                        }
-
-                        Transformer transformer = new Transformer() {
-
-                            public Object transform(Object input) {
-                                ObjectId targetId = ((ObjectDiff) input)
-                                        .getArcSnapshotValue(relationship.getName());
-                                return targetId != null ? targetId.getIdSnapshot().get(
-                                        dbAttrPair.getTargetName()) : null;
-                            }
-                        };
-
-                        if (index < 0) {
-                            attributes.add(dbAttribute);
-                            valueTransformers.add(transformer);
-                        }
-                        else {
-                            valueTransformers.set(index, transformer);
-                        }
-                    }
-                }
-            }
-        }
-    }
+	private List<DbAttribute> attributes;
+	private List<Transformer> valueTransformers;
+	private boolean usingOptimisticLocking;
+
+	public boolean isUsingOptimisticLocking() {
+		return usingOptimisticLocking;
+	}
+
+	List<DbAttribute> getAttributes() {
+		return attributes;
+	}
+
+	Map<String, Object> createQualifierSnapshot(ObjectDiff diff) {
+		int len = attributes.size();
+
+		Map<String, Object> map = new HashMap<>(len * 2);
+		for (int i = 0; i < len; i++) {
+			DbAttribute attribute = attributes.get(i);
+			if (!map.containsKey(attribute.getName())) {
+
+				Object value = valueTransformers.get(i).transform(diff);
+				map.put(attribute.getName(), value);
+			}
+		}
+
+		return map;
+	}
+
+	void reset(DbEntityClassDescriptor descriptor) {
+
+		attributes = new ArrayList<>(3);
+		valueTransformers = new ArrayList<>(3);
+		usingOptimisticLocking = descriptor.getEntity().getLockType() == ObjEntity.LOCK_TYPE_OPTIMISTIC;
+
+		// master PK columns
+		if (descriptor.isMaster()) {
+			for (final DbAttribute attribute : descriptor.getDbEntity().getPrimaryKeys()) {
+				attributes.add(attribute);
+				valueTransformers.add(new Transformer() {
+
+					public Object transform(Object input) {
+						ObjectId id = (ObjectId) ((ObjectDiff) input).getNodeId();
+						return id.getIdSnapshot().get(attribute.getName());
+					}
+				});
+			}
+		} else {
+
+			// TODO: andrus 12/23/2007 - only one step relationship is
+			// supported...
+			if (descriptor.getPathFromMaster().size() != 1) {
+				throw new CayenneRuntimeException(
+						"Only single step dependent relationships are currently supported. Actual path length: "
+								+ descriptor.getPathFromMaster().size());
+			}
+
+			DbRelationship masterDependentDbRel = descriptor.getPathFromMaster().get(0);
+
+			if (masterDependentDbRel != null) {
+				for (final DbJoin dbAttrPair : masterDependentDbRel.getJoins()) {
+					DbAttribute dbAttribute = dbAttrPair.getTarget();
+					if (!attributes.contains(dbAttribute)) {
+
+						attributes.add(dbAttribute);
+						valueTransformers.add(new Transformer() {
+
+							public Object transform(Object input) {
+								ObjectId id = (ObjectId) ((ObjectDiff) input).getNodeId();
+								return id.getIdSnapshot().get(dbAttrPair.getSourceName());
+							}
+						});
+					}
+				}
+			}
+		}
+
+		if (usingOptimisticLocking) {
+
+			for (final ObjAttribute attribute : descriptor.getEntity().getAttributes()) {
+
+				if (attribute.isUsedForLocking()) {
+					// only care about first step in a flattened attribute
+					DbAttribute dbAttribute = (DbAttribute) attribute.getDbPathIterator().next();
+
+					if (!attributes.contains(dbAttribute)) {
+						attributes.add(dbAttribute);
+
+						valueTransformers.add(new Transformer() {
+
+							public Object transform(Object input) {
+								return ((ObjectDiff) input).getSnapshotValue(attribute.getName());
+							}
+						});
+					}
+				}
+			}
+
+			for (final ObjRelationship relationship : descriptor.getEntity().getRelationships()) {
+
+				if (relationship.isUsedForLocking()) {
+					// only care about the first DbRelationship
+					DbRelationship dbRelationship = relationship.getDbRelationships().get(0);
+
+					for (final DbJoin dbAttrPair : dbRelationship.getJoins()) {
+						DbAttribute dbAttribute = dbAttrPair.getSource();
+
+						// relationship transformers override attribute transformers
+						// for meaningful FKs... why meaningful FKs can go out of
+						// sync is another story (CAY-595)
+						int index = attributes.indexOf(dbAttribute);
+						if (index >= 0 && !dbAttribute.isForeignKey()) {
+							continue;
+						}
+
+						Transformer transformer = new Transformer() {
+
+							public Object transform(Object input) {
+								ObjectId targetId = ((ObjectDiff) input).getArcSnapshotValue(relationship.getName());
+								return targetId != null ? targetId.getIdSnapshot().get(dbAttrPair.getTargetName())
+										: null;
+							}
+						};
+
+						if (index < 0) {
+							attributes.add(dbAttribute);
+							valueTransformers.add(transformer);
+						} else {
+							valueTransformers.set(index, transformer);
+						}
+					}
+				}
+			}
+		}
+	}
 }
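
The class above keeps two parallel lists - one DbAttribute and one Transformer per qualifier column - and createQualifierSnapshot() walks them in lock-step, pulling each value out of the ObjectDiff through the callback registered at the same index. A minimal sketch of that parallel-list callback pattern, using a hypothetical ValueSource interface in place of the Transformer callback (names are illustrative, not Cayenne APIs):

	import java.util.ArrayList;
	import java.util.HashMap;
	import java.util.List;
	import java.util.Map;

	public class ParallelListSnapshot {

		// hypothetical stand-in for the Transformer callbacks used above
		interface ValueSource {
			Object valueFrom(Map<String, Object> source);
		}

		private final List<String> columns = new ArrayList<>();
		private final List<ValueSource> sources = new ArrayList<>();

		void register(String column, ValueSource source) {
			columns.add(column);
			sources.add(source);
		}

		// the same index pairs a column with its value callback, as in createQualifierSnapshot()
		Map<String, Object> snapshot(Map<String, Object> input) {
			Map<String, Object> result = new HashMap<>(columns.size() * 2);
			for (int i = 0; i < columns.size(); i++) {
				String column = columns.get(i);
				if (!result.containsKey(column)) {
					result.put(column, sources.get(i).valueFrom(input));
				}
			}
			return result;
		}
	}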

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DbGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DbGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DbGenerator.java
index b56cae0..51d63ae 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DbGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DbGenerator.java
@@ -155,9 +155,9 @@ public class DbGenerator {
 	 * NOT executed in this method.
 	 */
 	protected void buildStatements() {
-		dropTables = new HashMap<String, Collection<String>>();
-		createTables = new HashMap<String, String>();
-		createConstraints = new HashMap<String, List<String>>();
+		dropTables = new HashMap<>();
+		createTables = new HashMap<>();
+		createConstraints = new HashMap<>();
 
 		DbAdapter adapter = getAdapter();
 		for (final DbEntity dbe : this.dbEntitiesInInsertOrder) {
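
This hunk, together with the DbGeneratorPostprocessor, DbLoader and ObjectDiff hunks below, applies the Java 7 diamond operator: the explicit type arguments on the right-hand side of new are dropped and the compiler infers them from the declared type. A minimal standalone illustration (class and variable names are made up for this example, not taken from Cayenne):

	import java.util.HashMap;
	import java.util.List;
	import java.util.Map;

	public class DiamondExample {
		public static void main(String[] args) {
			// pre-Java 7: type arguments repeated on both sides
			Map<String, List<String>> before = new HashMap<String, List<String>>();

			// Java 7+: the diamond operator infers <String, List<String>> from the declaration
			Map<String, List<String>> after = new HashMap<>();

			System.out.println(before.equals(after)); // true - both maps are empty
		}
	}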

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DbGeneratorPostprocessor.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DbGeneratorPostprocessor.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DbGeneratorPostprocessor.java
index cc1eb58..ff8bd10 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DbGeneratorPostprocessor.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DbGeneratorPostprocessor.java
@@ -38,7 +38,7 @@ class DbGeneratorPostprocessor {
 	private static final Map<String, HSQLDBPostprocessor> postprocessors;
 
 	static {
-		postprocessors = new HashMap<String, HSQLDBPostprocessor>();
+		postprocessors = new HashMap<>();
 		postprocessors.put(HSQLDBAdapter.class.getName(), new HSQLDBPostprocessor());
 	}
 

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/DbLoader.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/DbLoader.java b/cayenne-server/src/main/java/org/apache/cayenne/access/DbLoader.java
index 7b1bce0..26cf333 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/DbLoader.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/DbLoader.java
@@ -266,7 +266,7 @@ public class DbLoader {
 		}
 
 		// Get all the foreign keys referencing this table
-		Map<String, DbEntity> tablesMap = new HashMap<String, DbEntity>();
+		Map<String, DbEntity> tablesMap = new HashMap<>();
 		for (DbEntity table : tables) {
 			tablesMap.put(table.getName(), table);
 		}
@@ -377,7 +377,7 @@ public class DbLoader {
 
 	private Map<String, Set<ExportedKey>> loadExportedKeys(DbLoaderConfiguration config, String catalog, String schema,
 			Map<String, DbEntity> tables) throws SQLException {
-		Map<String, Set<ExportedKey>> keys = new HashMap<String, Set<ExportedKey>>();
+		Map<String, Set<ExportedKey>> keys = new HashMap<>();
 
 		for (DbEntity dbEntity : tables.values()) {
 			if (!delegate.dbRelationship(dbEntity)) {
@@ -392,7 +392,7 @@ public class DbLoader {
 				LOGGER.info(
 						"Error getting relationships for '" + catalog + "." + schema + "', ignoring. "
 								+ cay182Ex.getMessage(), cay182Ex);
-				return new HashMap<String, Set<ExportedKey>>();
+				return new HashMap<>();
 			}
 
 			try {
@@ -737,7 +737,7 @@ public class DbLoader {
 	}
 
 	private Map<String, Procedure> loadProcedures(DbLoaderConfiguration config) throws SQLException {
-		Map<String, Procedure> procedures = new HashMap<String, Procedure>();
+		Map<String, Procedure> procedures = new HashMap<>();
 
 		FiltersConfig filters = config.getFiltersConfig();
 		for (CatalogFilter catalog : filters.catalogs) {
@@ -755,7 +755,7 @@ public class DbLoader {
 
 	private Map<String, Procedure> loadProcedures(FiltersConfig filters, String catalog, String schema)
 			throws SQLException {
-		Map<String, Procedure> procedures = new HashMap<String, Procedure>();
+		Map<String, Procedure> procedures = new HashMap<>();
 		// get procedures
 
 		try (ResultSet rs = getMetaData().getProcedures(catalog, schema, WILDCARD);) {
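
The last context line above uses try-with-resources, the other Java 7 construct adopted in this commit: the ResultSet is closed automatically when the block exits, whether normally or via an exception. (The trailing semicolon inside the parentheses is permitted by the Java 7 grammar.) A standalone sketch of the construct, assuming an H2 in-memory database purely for illustration - the URL and query are placeholders, not taken from DbLoader:

	import java.sql.Connection;
	import java.sql.DriverManager;
	import java.sql.ResultSet;
	import java.sql.SQLException;
	import java.sql.Statement;

	public class TryWithResourcesExample {
		public static void main(String[] args) throws SQLException {
			// all three resources are closed in reverse declaration order on exit
			try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
					Statement st = con.createStatement();
					ResultSet rs = st.executeQuery("SELECT 1")) {
				while (rs.next()) {
					System.out.println(rs.getInt(1));
				}
			}
		}
	}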

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/FlattenedArcKey.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/FlattenedArcKey.java b/cayenne-server/src/main/java/org/apache/cayenne/access/FlattenedArcKey.java
index 0be0eb1..d50c042 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/FlattenedArcKey.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/FlattenedArcKey.java
@@ -51,341 +51,314 @@ import org.apache.cayenne.util.Util;
  */
 final class FlattenedArcKey {
 
-    ObjRelationship relationship;
-
-    DbArcId id1;
-    DbArcId id2;
-
-    FlattenedArcKey(ObjectId sourceId, ObjectId destinationId,
-            ObjRelationship relationship) {
-
-        this.relationship = relationship;
-
-        List<DbRelationship> dbRelationships = relationship
-                .getDbRelationships();
-        if (dbRelationships.size() != 2) {
-            throw new CayenneRuntimeException(
-                    "Only single-step flattened relationships are supported in this operation, whereas the relationship '%s' has %s",
-                    relationship, dbRelationships.size());
-        }
-
-        DbRelationship r1 = dbRelationships.get(0);
-        DbRelationship r2 = dbRelationships.get(1).getReverseRelationship();
-
-        if (r2 == null) {
-            throw new IllegalStateException(
-                    "No reverse relationship for DbRelationship "
-                            + dbRelationships.get(1));
-        }
-
-        id1 = new DbArcId(sourceId, r1);
-        id2 = new DbArcId(destinationId, r2);
-    }
-
-    /**
-     * Returns a join DbEntity for the single-step flattened relationship.
-     */
-    DbEntity getJoinEntity() {
-        return id1.getEntity();
-    }
-
-    /**
-     * Returns a snapshot for join record for the single-step flattened
-     * relationship, generating value for the primary key column if it is not
-     * propagated via the relationships.
-     */
-    Map<String, Object> buildJoinSnapshotForInsert(DataNode node) {
-        Map<String, Object> snapshot = lazyJoinSnapshot();
-
-        boolean autoPkDone = false;
-        DbEntity joinEntity = getJoinEntity();
-
-        for (DbAttribute dbAttr : joinEntity.getPrimaryKeys()) {
-            String dbAttrName = dbAttr.getName();
-            if (snapshot.containsKey(dbAttrName)) {
-                continue;
-            }
-
-            DbAdapter adapter = node.getAdapter();
-
-            // skip db-generated... looks like we don't care about the actual PK
-            // value
-            // here, so no need to retrieve db-generated pk back to Java.
-            if (adapter.supportsGeneratedKeys() && dbAttr.isGenerated()) {
-                continue;
-            }
-
-            if (autoPkDone) {
-                throw new CayenneRuntimeException(
-                        "Primary Key autogeneration only works for a single attribute.");
-            }
-
-            // finally, use database generation mechanism
-            try {
-                Object pkValue = adapter.getPkGenerator().generatePk(node,
-                        dbAttr);
-                snapshot.put(dbAttrName, pkValue);
-                autoPkDone = true;
-            } catch (Exception ex) {
-                throw new CayenneRuntimeException("Error generating PK: "
-                        + ex.getMessage(), ex);
-            }
-        }
-
-        return snapshot;
-    }
-
-    /**
-     * Returns pk snapshots for join records for the single-step flattened
-     * relationship. Multiple joins between the same pair of objects are
-     * theoretically possible, so the return value is a list.
-     */
-    List buildJoinSnapshotsForDelete(DataNode node) {
-        Map snapshot = eagerJoinSnapshot();
-
-        DbEntity joinEntity = getJoinEntity();
-
-        boolean fetchKey = false;
-        for (DbAttribute dbAttr : joinEntity.getPrimaryKeys()) {
-            String dbAttrName = dbAttr.getName();
-            if (!snapshot.containsKey(dbAttrName)) {
-                fetchKey = true;
-                break;
-            }
-        }
-
-        if (!fetchKey) {
-            return Collections.singletonList(snapshot);
-        }
-
-        // ok, the key is not included in snapshot, must do the fetch...
-        // TODO: this should be optimized in the future, but now
-        // DeleteBatchQuery
-        // expects a PK snapshot, so we must provide it.
-
-        QuotingStrategy quoter = node.getAdapter().getQuotingStrategy();
-
-        StringBuilder sql = new StringBuilder("SELECT ");
-        Collection<DbAttribute> pk = joinEntity.getPrimaryKeys();
-        final List<DbAttribute> pkList = pk instanceof List ? (List<DbAttribute>) pk
-                : new ArrayList<DbAttribute>(pk);
-
-        for (int i = 0; i < pkList.size(); i++) {
-
-            if (i > 0) {
-                sql.append(", ");
-            }
-
-            DbAttribute attribute = pkList.get(i);
-
-            sql.append("#result('");
-            sql.append(quoter.quotedName(attribute));
-
-            // since the name of the column can potentially be quoted and
-            // use reserved keywords as name, let's specify generated column
-            // name parameters to ensure the query doesn't explode
-            sql.append("' '").append(TypesMapping.getJavaBySqlType(attribute.getType()));
-            sql.append("' '").append("pk").append(i);
-            sql.append("')");
-        }
-
-        sql.append(" FROM ").append(quoter.quotedFullyQualifiedName(joinEntity))
-                .append(" WHERE ");
-        int i = snapshot.size();
-        for (Object key : snapshot.keySet()) {
-            sql.append(quoter.quotedIdentifier(joinEntity, String.valueOf(key)))
-                    .append(" #bindEqual($").append(key).append(")");
-
-            if (--i > 0) {
-                sql.append(" AND ");
-            }
-        }
-
-        SQLTemplate query = new SQLTemplate(joinEntity.getDataMap(),
-                sql.toString(), true);
-        query.setParams(snapshot);
-
-        final List[] result = new List[1];
-
-        node.performQueries(Collections.singleton((Query) query),
-                new DefaultOperationObserver() {
-
-                    @Override
-                    public void nextRows(Query query, List dataRows) {
-
-                        if (!dataRows.isEmpty()) {
-                            // decode results...
-
-                            List<DataRow> fixedRows = new ArrayList<DataRow>(
-                                    dataRows.size());
-                            for (Object o : dataRows) {
-                                DataRow row = (DataRow) o;
-
-                                DataRow fixedRow = new DataRow(2);
-
-                                for (int i = 0; i < pkList.size(); i++) {
-                                    DbAttribute attribute = pkList.get(i);
-                                    fixedRow.put(attribute.getName(),
-                                            row.get("pk" + i));
-                                }
-
-                                fixedRows.add(fixedRow);
-                            }
-
-                            dataRows = fixedRows;
-                        }
-
-                        result[0] = dataRows;
-                    }
-
-                    @Override
-                    public void nextQueryException(Query query, Exception ex) {
-                        throw new CayenneRuntimeException(
-                                "Raising from query exception.", Util
-                                        .unwindException(ex));
-                    }
-
-                    @Override
-                    public void nextGlobalException(Exception ex) {
-                        throw new CayenneRuntimeException(
-                                "Raising from underlyingQueryEngine exception.",
-                                Util.unwindException(ex));
-                    }
-                });
-
-        return result[0];
-    }
-
-    @Override
-    public int hashCode() {
-        // order ids in array for hashcode consistency purposes. The actual
-        // order direction is not important, as long as it
-        // is consistent across invocations
-
-        int compare = id1.getSourceId().getEntityName()
-                .compareTo(id2.getSourceId().getEntityName());
-
-        if (compare == 0) {
-            compare = id1.getIncominArc().getName()
-                    .compareTo(id2.getIncominArc().getName());
-
-            if (compare == 0) {
-                // since ordering is mostly important for detecting equivalent
-                // FlattenedArc keys coming from 2 opposite directions, the name
-                // of ObjRelationship can be a good criteria
-
-                ObjRelationship or2 = relationship.getReverseRelationship();
-                compare = or2 != null ? relationship.getName().compareTo(
-                        or2.getName()) : 1;
-
-                // TODO: if(compare == 0) ??
-            }
-        }
-
-        DbArcId[] ordered;
-        if (compare < 0) {
-            ordered = new DbArcId[] { id1, id2 };
-        } else {
-            ordered = new DbArcId[] { id2, id1 };
-        }
-
-        return new HashCodeBuilder().append(ordered).toHashCode();
-    }
-
-    /**
-     * Defines equal based on whether the relationship is bidirectional.
-     */
-    @Override
-    public boolean equals(Object object) {
-
-        if (this == object) {
-            return true;
-        }
-
-        if (!(object instanceof FlattenedArcKey)) {
-            return false;
-        }
-
-        FlattenedArcKey key = (FlattenedArcKey) object;
-
-        // ignore id order in comparison
-        if (id1.equals(key.id1)) {
-            return id2.equals(key.id2);
-        } else if (id1.equals(key.id2)) {
-            return id2.equals(key.id1);
-        }
-
-        return false;
-    }
-
-    private Map eagerJoinSnapshot() {
-
-        List<DbRelationship> relList = relationship.getDbRelationships();
-        if (relList.size() != 2) {
-            throw new CayenneRuntimeException(
-                    "Only single-step flattened relationships are supported in this operation: "
-                            + relationship);
-        }
-
-        DbRelationship firstDbRel = relList.get(0);
-        DbRelationship secondDbRel = relList.get(1);
-
-        // here ordering of ids is determined by 'relationship', so use id1, id2
-        // instead of orderedIds
-        Map<String, ?> sourceId = id1.getSourceId().getIdSnapshot();
-        Map<String, ?> destinationId = id2.getSourceId().getIdSnapshot();
-
-        Map<String, Object> snapshot = new HashMap<String, Object>(
-                sourceId.size() + destinationId.size(), 1);
-        for (DbJoin join : firstDbRel.getJoins()) {
-            snapshot.put(join.getTargetName(),
-                    sourceId.get(join.getSourceName()));
-        }
-
-        for (DbJoin join : secondDbRel.getJoins()) {
-            snapshot.put(join.getSourceName(),
-                    destinationId.get(join.getTargetName()));
-        }
-
-        return snapshot;
-    }
-
-    private Map<String, Object> lazyJoinSnapshot() {
-
-        List<DbRelationship> relList = relationship.getDbRelationships();
-        if (relList.size() != 2) {
-            throw new CayenneRuntimeException(
-                    "Only single-step flattened relationships are supported in this operation: "
-                            + relationship);
-        }
-
-        DbRelationship firstDbRel = relList.get(0);
-        DbRelationship secondDbRel = relList.get(1);
-
-        List<DbJoin> fromSourceJoins = firstDbRel.getJoins();
-        List<DbJoin> toTargetJoins = secondDbRel.getJoins();
-
-        Map<String, Object> snapshot = new HashMap<String, Object>(
-                fromSourceJoins.size() + toTargetJoins.size(), 1);
-
-        // here ordering of ids is determined by 'relationship', so use id1, id2
-        // instead of orderedIds
-
-        for (int i = 0, numJoins = fromSourceJoins.size(); i < numJoins; i++) {
-            DbJoin join = fromSourceJoins.get(i);
-
-            Object value = new PropagatedValueFactory(id1.getSourceId(),
-                    join.getSourceName());
-            snapshot.put(join.getTargetName(), value);
-        }
-
-        for (int i = 0, numJoins = toTargetJoins.size(); i < numJoins; i++) {
-            DbJoin join = toTargetJoins.get(i);
-            Object value = new PropagatedValueFactory(id2.getSourceId(),
-                    join.getTargetName());
-            snapshot.put(join.getSourceName(), value);
-        }
-
-        return snapshot;
-    }
+	ObjRelationship relationship;
+
+	DbArcId id1;
+	DbArcId id2;
+
+	FlattenedArcKey(ObjectId sourceId, ObjectId destinationId, ObjRelationship relationship) {
+
+		this.relationship = relationship;
+
+		List<DbRelationship> dbRelationships = relationship.getDbRelationships();
+		if (dbRelationships.size() != 2) {
+			throw new CayenneRuntimeException(
+					"Only single-step flattened relationships are supported in this operation, whereas the relationship '%s' has %s",
+					relationship, dbRelationships.size());
+		}
+
+		DbRelationship r1 = dbRelationships.get(0);
+		DbRelationship r2 = dbRelationships.get(1).getReverseRelationship();
+
+		if (r2 == null) {
+			throw new IllegalStateException("No reverse relationship for DbRelationship " + dbRelationships.get(1));
+		}
+
+		id1 = new DbArcId(sourceId, r1);
+		id2 = new DbArcId(destinationId, r2);
+	}
+
+	/**
+	 * Returns a join DbEntity for the single-step flattened relationship.
+	 */
+	DbEntity getJoinEntity() {
+		return id1.getEntity();
+	}
+
+	/**
+	 * Returns a snapshot for join record for the single-step flattened
+	 * relationship, generating value for the primary key column if it is not
+	 * propagated via the relationships.
+	 */
+	Map<String, Object> buildJoinSnapshotForInsert(DataNode node) {
+		Map<String, Object> snapshot = lazyJoinSnapshot();
+
+		boolean autoPkDone = false;
+		DbEntity joinEntity = getJoinEntity();
+
+		for (DbAttribute dbAttr : joinEntity.getPrimaryKeys()) {
+			String dbAttrName = dbAttr.getName();
+			if (snapshot.containsKey(dbAttrName)) {
+				continue;
+			}
+
+			DbAdapter adapter = node.getAdapter();
+
+			// skip db-generated... looks like we don't care about the actual PK
+			// value here, so no need to retrieve db-generated pk back to Java.
+			if (adapter.supportsGeneratedKeys() && dbAttr.isGenerated()) {
+				continue;
+			}
+
+			if (autoPkDone) {
+				throw new CayenneRuntimeException("Primary Key autogeneration only works for a single attribute.");
+			}
+
+			// finally, use database generation mechanism
+			try {
+				Object pkValue = adapter.getPkGenerator().generatePk(node, dbAttr);
+				snapshot.put(dbAttrName, pkValue);
+				autoPkDone = true;
+			} catch (Exception ex) {
+				throw new CayenneRuntimeException("Error generating PK: " + ex.getMessage(), ex);
+			}
+		}
+
+		return snapshot;
+	}
+
+	/**
+	 * Returns pk snapshots for join records for the single-step flattened
+	 * relationship. Multiple joins between the same pair of objects are
+	 * theoretically possible, so the return value is a list.
+	 */
+	List buildJoinSnapshotsForDelete(DataNode node) {
+		Map snapshot = eagerJoinSnapshot();
+
+		DbEntity joinEntity = getJoinEntity();
+
+		boolean fetchKey = false;
+		for (DbAttribute dbAttr : joinEntity.getPrimaryKeys()) {
+			String dbAttrName = dbAttr.getName();
+			if (!snapshot.containsKey(dbAttrName)) {
+				fetchKey = true;
+				break;
+			}
+		}
+
+		if (!fetchKey) {
+			return Collections.singletonList(snapshot);
+		}
+
+		// ok, the key is not included in snapshot, must do the fetch...
+		// TODO: this should be optimized in the future, but for now
+		// DeleteBatchQuery expects a PK snapshot, so we must provide it.
+
+		QuotingStrategy quoter = node.getAdapter().getQuotingStrategy();
+
+		StringBuilder sql = new StringBuilder("SELECT ");
+		Collection<DbAttribute> pk = joinEntity.getPrimaryKeys();
+		final List<DbAttribute> pkList = pk instanceof List ? (List<DbAttribute>) pk : new ArrayList<DbAttribute>(pk);
+
+		for (int i = 0; i < pkList.size(); i++) {
+
+			if (i > 0) {
+				sql.append(", ");
+			}
+
+			DbAttribute attribute = pkList.get(i);
+
+			sql.append("#result('");
+			sql.append(quoter.quotedName(attribute));
+
+			// since the column name can potentially be quoted and may use
+			// reserved keywords, let's specify generated column name
+			// parameters to ensure the query doesn't explode
+			sql.append("' '").append(TypesMapping.getJavaBySqlType(attribute.getType()));
+			sql.append("' '").append("pk").append(i);
+			sql.append("')");
+		}
+
+		sql.append(" FROM ").append(quoter.quotedFullyQualifiedName(joinEntity)).append(" WHERE ");
+		int i = snapshot.size();
+		for (Object key : snapshot.keySet()) {
+			sql.append(quoter.quotedIdentifier(joinEntity, String.valueOf(key))).append(" #bindEqual($").append(key)
+					.append(")");
+
+			if (--i > 0) {
+				sql.append(" AND ");
+			}
+		}
+
+		SQLTemplate query = new SQLTemplate(joinEntity.getDataMap(), sql.toString(), true);
+		query.setParams(snapshot);
+
+		final List[] result = new List[1];
+
+		node.performQueries(Collections.singleton((Query) query), new DefaultOperationObserver() {
+
+			@Override
+			public void nextRows(Query query, List dataRows) {
+
+				if (!dataRows.isEmpty()) {
+					// decode results...
+
+					List<DataRow> fixedRows = new ArrayList<DataRow>(dataRows.size());
+					for (Object o : dataRows) {
+						DataRow row = (DataRow) o;
+
+						DataRow fixedRow = new DataRow(2);
+
+						for (int i = 0; i < pkList.size(); i++) {
+							DbAttribute attribute = pkList.get(i);
+							fixedRow.put(attribute.getName(), row.get("pk" + i));
+						}
+
+						fixedRows.add(fixedRow);
+					}
+
+					dataRows = fixedRows;
+				}
+
+				result[0] = dataRows;
+			}
+
+			@Override
+			public void nextQueryException(Query query, Exception ex) {
+				throw new CayenneRuntimeException("Raising from query exception.", Util.unwindException(ex));
+			}
+
+			@Override
+			public void nextGlobalException(Exception ex) {
+				throw new CayenneRuntimeException("Raising from underlyingQueryEngine exception.", Util
+						.unwindException(ex));
+			}
+		});
+
+		return result[0];
+	}
+
+	@Override
+	public int hashCode() {
+		// order ids in array for hashcode consistency purposes. The actual order
+		// direction is not important, as long as it is consistent across invocations
+
+		int compare = id1.getSourceId().getEntityName().compareTo(id2.getSourceId().getEntityName());
+
+		if (compare == 0) {
+			compare = id1.getIncominArc().getName().compareTo(id2.getIncominArc().getName());
+
+			if (compare == 0) {
+				// since ordering is mostly important for detecting equivalent
+				// FlattenedArc keys coming from 2 opposite directions, the name
+				// of ObjRelationship can be a good criterion
+
+				ObjRelationship or2 = relationship.getReverseRelationship();
+				compare = or2 != null ? relationship.getName().compareTo(or2.getName()) : 1;
+
+				// TODO: if(compare == 0) ??
+			}
+		}
+
+		DbArcId[] ordered;
+		if (compare < 0) {
+			ordered = new DbArcId[] { id1, id2 };
+		} else {
+			ordered = new DbArcId[] { id2, id1 };
+		}
+
+		return new HashCodeBuilder().append(ordered).toHashCode();
+	}
+
+	/**
+	 * Defines equal based on whether the relationship is bidirectional.
+	 */
+	@Override
+	public boolean equals(Object object) {
+
+		if (this == object) {
+			return true;
+		}
+
+		if (!(object instanceof FlattenedArcKey)) {
+			return false;
+		}
+
+		FlattenedArcKey key = (FlattenedArcKey) object;
+
+		// ignore id order in comparison
+		if (id1.equals(key.id1)) {
+			return id2.equals(key.id2);
+		} else if (id1.equals(key.id2)) {
+			return id2.equals(key.id1);
+		}
+
+		return false;
+	}
+
+	private Map eagerJoinSnapshot() {
+
+		List<DbRelationship> relList = relationship.getDbRelationships();
+		if (relList.size() != 2) {
+			throw new CayenneRuntimeException(
+					"Only single-step flattened relationships are supported in this operation: " + relationship);
+		}
+
+		DbRelationship firstDbRel = relList.get(0);
+		DbRelationship secondDbRel = relList.get(1);
+
+		// here ordering of ids is determined by 'relationship', so use id1, id2
+		// instead of orderedIds
+		Map<String, ?> sourceId = id1.getSourceId().getIdSnapshot();
+		Map<String, ?> destinationId = id2.getSourceId().getIdSnapshot();
+
+		Map<String, Object> snapshot = new HashMap<>(sourceId.size() + destinationId.size(), 1);
+		for (DbJoin join : firstDbRel.getJoins()) {
+			snapshot.put(join.getTargetName(), sourceId.get(join.getSourceName()));
+		}
+
+		for (DbJoin join : secondDbRel.getJoins()) {
+			snapshot.put(join.getSourceName(), destinationId.get(join.getTargetName()));
+		}
+
+		return snapshot;
+	}
+
+	private Map<String, Object> lazyJoinSnapshot() {
+
+		List<DbRelationship> relList = relationship.getDbRelationships();
+		if (relList.size() != 2) {
+			throw new CayenneRuntimeException(
+					"Only single-step flattened relationships are supported in this operation: " + relationship);
+		}
+
+		DbRelationship firstDbRel = relList.get(0);
+		DbRelationship secondDbRel = relList.get(1);
+
+		List<DbJoin> fromSourceJoins = firstDbRel.getJoins();
+		List<DbJoin> toTargetJoins = secondDbRel.getJoins();
+
+		Map<String, Object> snapshot = new HashMap<>(fromSourceJoins.size() + toTargetJoins.size(), 1);
+
+		// here ordering of ids is determined by 'relationship', so use id1, id2
+		// instead of orderedIds
+
+		for (int i = 0, numJoins = fromSourceJoins.size(); i < numJoins; i++) {
+			DbJoin join = fromSourceJoins.get(i);
+
+			Object value = new PropagatedValueFactory(id1.getSourceId(), join.getSourceName());
+			snapshot.put(join.getTargetName(), value);
+		}
+
+		for (int i = 0, numJoins = toTargetJoins.size(); i < numJoins; i++) {
+			DbJoin join = toTargetJoins.get(i);
+			Object value = new PropagatedValueFactory(id2.getSourceId(), join.getTargetName());
+			snapshot.put(join.getSourceName(), value);
+		}
+
+		return snapshot;
+	}
 }
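
FlattenedArcKey above treats its two arc ids as an unordered pair: equals() accepts either ordering, and hashCode() first puts the ids into a stable order (comparing entity names, then arc names, then the relationship name) so that both orderings hash identically. A minimal sketch of the same equals/hashCode pattern over plain strings (UnorderedPair is a hypothetical illustration, not a Cayenne class):

	import java.util.Objects;

	public final class UnorderedPair {
		private final String a;
		private final String b;

		public UnorderedPair(String a, String b) {
			this.a = a;
			this.b = b;
		}

		@Override
		public boolean equals(Object o) {
			if (this == o) {
				return true;
			}
			if (!(o instanceof UnorderedPair)) {
				return false;
			}
			UnorderedPair p = (UnorderedPair) o;
			// ignore element order, mirroring FlattenedArcKey.equals()
			return (a.equals(p.a) && b.equals(p.b)) || (a.equals(p.b) && b.equals(p.a));
		}

		@Override
		public int hashCode() {
			// put the elements into a consistent order before hashing, as FlattenedArcKey.hashCode() does
			return a.compareTo(b) <= 0 ? Objects.hash(a, b) : Objects.hash(b, a);
		}
	}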

http://git-wip-us.apache.org/repos/asf/cayenne/blob/13d0da53/cayenne-server/src/main/java/org/apache/cayenne/access/ObjectDiff.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/access/ObjectDiff.java b/cayenne-server/src/main/java/org/apache/cayenne/access/ObjectDiff.java
index f85d482..45b7176 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/access/ObjectDiff.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/access/ObjectDiff.java
@@ -91,8 +91,8 @@ class ObjectDiff extends NodeDiff {
             ObjEntity entity = entityResolver.getObjEntity(entityName);
             final boolean lock = entity.getLockType() == ObjEntity.LOCK_TYPE_OPTIMISTIC;
 
-            this.snapshot = new HashMap<String, Object>();
-            this.arcSnapshot = new HashMap<String, Object>();
+            this.snapshot = new HashMap<>();
+            this.arcSnapshot = new HashMap<>();
 
             classDescriptor.visitProperties(new PropertyVisitor() {
 
@@ -270,7 +270,7 @@ class ObjectDiff extends NodeDiff {
         }
 
         if (currentArcSnapshot == null) {
-            currentArcSnapshot = new HashMap<String, Object>();
+            currentArcSnapshot = new HashMap<>();
         }
 
         currentArcSnapshot.put(arcId, arcDiff.getTargetNodeId());
@@ -432,7 +432,7 @@ class ObjectDiff extends NodeDiff {
      */
     void updateArcSnapshot(String propertyName, Persistent object) {
         if (arcSnapshot == null) {
-            arcSnapshot = new HashMap<String, Object>();
+            arcSnapshot = new HashMap<>();
         }
 
         arcSnapshot.put(propertyName, object != null ? object.getObjectId() : null);
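
The ObjectDiff hunks above also show the arc snapshot map being created lazily, only when the first arc change needs to be recorded. A minimal sketch of that lazy-initialization pattern (LazySnapshotHolder and its method names are hypothetical, not Cayenne APIs):

	import java.util.HashMap;
	import java.util.Map;

	public class LazySnapshotHolder {

		// created on first write rather than up front, as in ObjectDiff
		private Map<String, Object> arcSnapshot;

		void record(String propertyName, Object value) {
			if (arcSnapshot == null) {
				arcSnapshot = new HashMap<>();
			}
			arcSnapshot.put(propertyName, value);
		}

		Object recorded(String propertyName) {
			return arcSnapshot != null ? arcSnapshot.get(propertyName) : null;
		}
	}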

