4 changes: 2 additions & 2 deletions ci/jpa-3.1-tck.Jenkinsfile
@@ -26,8 +26,8 @@ pipeline {
}
parameters {
choice(name: 'IMAGE_JDK', choices: ['jdk11'], description: 'The JDK base image version to use for the TCK image.')
string(name: 'TCK_VERSION', defaultValue: '3.1.2', description: 'The version of the Jakarta JPA TCK i.e. `2.2.0` or `3.0.1`')
string(name: 'TCK_SHA', defaultValue: '618a9fcdb0f897cda71227ed57d035ae1dc40fc392318809a734ffc6968e43ff', description: 'The SHA256 of the Jakarta JPA TCK that is distributed under https://download.eclipse.org/jakartaee/persistence/3.1/jakarta-persistence-tck-${TCK_VERSION}.zip.sha256')
string(name: 'TCK_VERSION', defaultValue: '3.1.6', description: 'The version of the Jakarta JPA TCK i.e. `2.2.0` or `3.0.1`')
string(name: 'TCK_SHA', defaultValue: '790ca7a2a95ea098cfedafa2689c0d7a379fa62c74fed9505dd23191292f59fe', description: 'The SHA256 of the Jakarta JPA TCK that is distributed under https://download.eclipse.org/jakartaee/persistence/3.1/jakarta-persistence-tck-${TCK_VERSION}.zip.sha256')
booleanParam(name: 'NO_SLEEP', defaultValue: true, description: 'Whether the NO_SLEEP patch should be applied to speed up the TCK execution')
}
stages {
@@ -11,6 +11,7 @@
import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.DiscriminatorType;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.persister.entity.JoinedSubclassEntityPersister;
@@ -88,18 +89,23 @@ public BasicFetch<?> generateFetch(
final TableGroup tableGroup = sqlAstCreationState.getFromClauseAccess().getTableGroup(
fetchParent.getNavigablePath()
);
// Since the expression is lazy, based on the available table reference joins,
// we need to force the initialization in case this is a fetch
tableDiscriminatorDetailsMap.forEach(
(tableName, tableDiscriminatorDetails) -> tableGroup.getTableReference(
fetchablePath,
tableName,
true
)
);
resolveSubTypeTableReferences( tableGroup, fetchablePath );
return super.generateFetch( fetchParent, fetchablePath, fetchTiming, selected, resultVariable, creationState );
}

private void resolveSubTypeTableReferences(TableGroup tableGroup, NavigablePath navigablePath) {
final EntityMappingType entityDescriptor = (EntityMappingType) tableGroup.getModelPart().getPartMappingType();
// Since the expression is lazy, based on the available table reference joins,
// we need to force the initialization in case this is selected
for ( EntityMappingType subMappingType : entityDescriptor.getSubMappingTypes() ) {
tableGroup.getTableReference(
navigablePath,
subMappingType.getMappedTableDetails().getTableName(),
true
);
}
}
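
For context, the discriminator this mapping renders appears to be a CASE expression over the subtype tables, so each subtype table reference must already be resolved when the expression is rendered. A hedged sketch of that shape, with made-up aliases and discriminator values:

```java
// Illustrative only: the rough shape of a CASE-based discriminator for a joined
// hierarchy without an explicit discriminator column (aliases/values are made up).
String caseDiscriminator =
		"case"
				+ " when child_one_1_.id is not null then 1"
				+ " when child_two_1_.id is not null then 2"
				+ " else 0"
				+ " end";
// Rendering this needs a table reference for every subtype table, which is why
// resolveSubTypeTableReferences() forces their resolution before the fetch is
// generated or the SQL expression is resolved.
```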

@Override
public Expression resolveSqlExpression(
NavigablePath navigablePath,
@@ -205,6 +205,10 @@ public void finishInitialization(RuntimeModelCreationContext context) {
registerEntityNameResolvers( persister, entityNameResolvers );
}

for ( EntityPersister persister : entityPersisterMap.values() ) {
persister.prepareLoaders();
}

collectionPersisterMap.values().forEach( CollectionPersister::postInstantiate );

registerEmbeddableMappingType( bootModel );
@@ -28,6 +28,7 @@
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.AssertionFailure;
@@ -127,7 +128,6 @@
import org.hibernate.jdbc.TooManyRowsAffectedException;
import org.hibernate.loader.ast.internal.MultiKeyLoadHelper;
import org.hibernate.loader.ast.internal.CacheEntityLoaderHelper;
import org.hibernate.engine.profile.internal.FetchProfileAffectee;
import org.hibernate.loader.ast.internal.LoaderSelectBuilder;
import org.hibernate.loader.ast.internal.LoaderSqlAstCreationState;
import org.hibernate.loader.ast.internal.MultiIdEntityLoaderArrayParam;
@@ -205,6 +205,7 @@
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.EntityInstantiator;
import org.hibernate.metamodel.spi.EntityRepresentationStrategy;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.mutation.DeleteCoordinator;
@@ -232,6 +233,7 @@
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.Alias;
import org.hibernate.sql.Delete;
import org.hibernate.sql.InFragment;
import org.hibernate.sql.SimpleSelect;
import org.hibernate.sql.Template;
import org.hibernate.sql.ast.spi.SimpleFromClauseAccessImpl;
@@ -307,6 +309,7 @@
import static org.hibernate.generator.EventType.UPDATE;
import static org.hibernate.internal.util.ReflectHelper.isAbstractClass;
import static org.hibernate.internal.util.StringHelper.isEmpty;
import static org.hibernate.internal.util.StringHelper.qualifyConditionally;
import static org.hibernate.internal.util.collections.ArrayHelper.contains;
import static org.hibernate.internal.util.collections.ArrayHelper.to2DStringArray;
import static org.hibernate.internal.util.collections.ArrayHelper.toBooleanArray;
@@ -3171,6 +3174,68 @@ else if ( discriminatorValue == NOT_NULL_DISCRIMINATOR ) {
return predicate;
}

protected String getPrunedDiscriminatorPredicate(
Map<String, EntityNameUse> entityNameUses,
MappingMetamodelImplementor mappingMetamodel,
String alias) {
final InFragment frag = new InFragment();
if ( isDiscriminatorFormula() ) {
frag.setFormula( alias, getDiscriminatorFormulaTemplate() );
}
else {
frag.setColumn( alias, getDiscriminatorColumnName() );
}
boolean containsNotNull = false;
for ( Map.Entry<String, EntityNameUse> entry : entityNameUses.entrySet() ) {
final EntityNameUse.UseKind useKind = entry.getValue().getKind();
if ( useKind == EntityNameUse.UseKind.PROJECTION || useKind == EntityNameUse.UseKind.EXPRESSION ) {
// We only care about treat and filter uses which allow to reduce the amount of rows to select
continue;
}
final EntityPersister persister = mappingMetamodel.getEntityDescriptor( entry.getKey() );
// Filtering for abstract entities makes no sense, so ignore that
// Also, it makes no sense to filter for any of the super types,
// as the query will contain a filter for that already anyway
if ( !persister.isAbstract() && ( this == persister || !isTypeOrSuperType( persister ) ) ) {
containsNotNull = containsNotNull || InFragment.NOT_NULL.equals( persister.getDiscriminatorSQLValue() );
frag.addValue( persister.getDiscriminatorSQLValue() );
}
}
final List<String> discriminatorSQLValues = Arrays.asList( ( (AbstractEntityPersister) getRootEntityDescriptor() ).fullDiscriminatorSQLValues );
if ( frag.getValues().size() == discriminatorSQLValues.size() ) {
// Nothing to prune if we filter for all subtypes
return null;
}

if ( containsNotNull ) {
final String lhs;
if ( isDiscriminatorFormula() ) {
lhs = StringHelper.replace( getDiscriminatorFormulaTemplate(), Template.TEMPLATE, alias );
}
else {
lhs = qualifyConditionally( alias, getDiscriminatorColumnName() );
}
final List<String> actualDiscriminatorSQLValues = new ArrayList<>( discriminatorSQLValues.size() );
for ( String value : discriminatorSQLValues ) {
if ( !frag.getValues().contains( value ) && !InFragment.NULL.equals( value ) ) {
actualDiscriminatorSQLValues.add( value );
}
}
final StringBuilder sb = new StringBuilder( 70 + actualDiscriminatorSQLValues.size() * 10 ).append( " or " );
if ( !actualDiscriminatorSQLValues.isEmpty() ) {
sb.append( lhs ).append( " is not in (" );
sb.append( String.join( ",", actualDiscriminatorSQLValues ) );
sb.append( ") and " );
}
sb.append( lhs ).append( " is not null" );
frag.getValues().remove( InFragment.NOT_NULL );
return frag.toFragmentString() + sb;
}
else {
return frag.toFragmentString();
}
}
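
A hedged sketch of a query that would exercise this pruning; the entity names, discriminator column and values below are illustrative, not taken from the PR:

```java
import java.util.List;

import org.hibernate.Session;

// Parent, ChildOne and ChildTwo are hypothetical entities of a JOINED hierarchy
// with an explicit @DiscriminatorColumn (a mapping sketch appears further down).
public class TreatPruningExample {

	static List<Parent> loadTreated(Session session) {
		// TREAT narrows the root to specific subtypes; the resulting TREAT-kind
		// entity name uses are what the pruned discriminator predicate filters on.
		return session.createQuery(
				"select p from Parent p"
						+ " where treat(p as ChildOne).childProperty = 1"
						+ " or treat(p as ChildTwo).otherProperty = 2",
				Parent.class
		).getResultList();
	}
}
```

With a hypothetical discriminator column `clazz_` on alias `t` and values 1 and 2 for the treated subtypes, the pruned predicate would come out roughly as `t.clazz_ in (1, 2)`; when one of the treated subtypes maps to the NOT NULL discriminator, the `... is not null` branch built above is appended instead.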

@Override
public void applyFilterRestrictions(
Consumer<Predicate> predicateConsumer,
@@ -3526,6 +3591,10 @@ protected String substituteBrackets(String sql) {
@Override
public final void postInstantiate() throws MappingException {
doLateInit();
}

@Override
public void prepareLoaders() {
prepareLoader( singleIdLoader );
prepareLoader( multiIdLoader );
}
@@ -119,6 +119,17 @@ public interface EntityPersister extends EntityMappingType, RootTableGroupProduc
*/
void postInstantiate() throws MappingException;

/**
* Prepare loaders associated with the persister. Distinct "phase"
* in building the persister after {@linkplain InFlightEntityMappingType#prepareMappingModel}
* and {@linkplain #postInstantiate()} have occurred.
* <p/>
* The distinct phase is used to ensure that all {@linkplain org.hibernate.metamodel.mapping.TableDetails}
* are available across the entire model
*/
default void prepareLoaders() {
}
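
A minimal sketch of the ordering this implies, mirroring the loop added to the runtime metamodel initialization earlier in this diff (the method name and parameter are illustrative, not framework API):

```java
// Illustrative fragment only: postInstantiate() runs for every persister first,
// and prepareLoaders() only afterwards, so loader creation can rely on the
// TableDetails of every other persister in the model.
static void finishPersisters(java.util.Collection<org.hibernate.persister.entity.EntityPersister> persisters) {
	for ( var persister : persisters ) {
		persister.postInstantiate();
	}
	for ( var persister : persisters ) {
		persister.prepareLoaders();
	}
}
```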

/**
* Return the {@link org.hibernate.SessionFactory} to which this persister
* belongs.
@@ -51,6 +51,7 @@
import org.hibernate.metamodel.mapping.internal.MappingModelCreationProcess;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.internal.SqlFragmentPredicate;
import org.hibernate.persister.spi.PersisterCreationContext;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.spi.NavigablePath;
@@ -75,6 +76,7 @@
import org.jboss.logging.Logger;

import static java.util.Collections.emptyMap;
import static org.hibernate.internal.util.collections.ArrayHelper.indexOf;
import static org.hibernate.internal.util.collections.ArrayHelper.to2DStringArray;
import static org.hibernate.internal.util.collections.ArrayHelper.toIntArray;
import static org.hibernate.internal.util.collections.ArrayHelper.toStringArray;
@@ -1271,7 +1273,8 @@ public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {

@Override
public TableDetails getMappedTableDetails() {
return getTableMapping( getTableMappings().length - 1 );
// Subtract the number of secondary tables (tableSpan - coreTableSpan) and get the last table mapping
return getTableMapping( getTableMappings().length - ( tableSpan - coreTableSpan ) - 1 );
}
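
A small worked example of the index arithmetic above, assuming getTableMappings() lists the inheritance tables first and any secondary tables last (the numbers are made up):

```java
// Hypothetical joined subclass mapped to three inheritance tables plus one secondary table.
int tableSpan = 4;      // all tables owned by this persister
int coreTableSpan = 3;  // inheritance tables only
int mappingCount = 4;   // getTableMappings().length

// Previous code: mappingCount - 1 = 3, i.e. the secondary table (wrong TableDetails).
// Fixed code: mappingCount - (tableSpan - coreTableSpan) - 1 = 2, the subclass's own table.
int mappedTableIndex = mappingCount - ( tableSpan - coreTableSpan ) - 1;
```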

@Override
@@ -1314,6 +1317,7 @@ public void pruneForSubclasses(TableGroup tableGroup, Map<String, EntityNameUse>
// i.e. with parenthesis around, as that means the table reference joins will be isolated
final boolean innerJoinOptimization = tableGroup.canUseInnerJoins() || tableGroup.isRealTableGroup();
final Set<String> tablesToInnerJoin = innerJoinOptimization ? new HashSet<>() : null;
boolean needsTreatDiscriminator = false;
for ( Map.Entry<String, EntityNameUse> entry : entityNameUses.entrySet() ) {
final EntityNameUse.UseKind useKind = entry.getValue().getKind();
final JoinedSubclassEntityPersister persister =
@@ -1355,15 +1359,22 @@ public void pruneForSubclasses(TableGroup tableGroup, Map<String, EntityNameUse>
}
}
}
final String tableName = persister.getTableName();
final TableReference mainTableReference = tableGroup.getTableReference(
null,
persister.getTableName(),
tableName,
false
);
if ( mainTableReference == null ) {
throw new UnknownTableReferenceException( persister.getTableName(), "Couldn't find table reference" );
if ( mainTableReference != null ) {
retainedTableReferences.add( mainTableReference );
}
if ( needsDiscriminator() ) {
// We allow multiple joined subclasses to use the same table if they define a discriminator column.
// In this case, we might need to add a discriminator condition to make sure we filter the correct subtype,
// see SingleTableEntityPersister#pruneForSubclasses for more details on this condition
needsTreatDiscriminator = needsTreatDiscriminator || !persister.isAbstract() &&
!isTypeOrSuperType( persister ) && useKind == EntityNameUse.UseKind.TREAT;
}
retainedTableReferences.add( mainTableReference );
}
// If no tables to inner join have been found, we add at least the super class tables of this persister
if ( innerJoinOptimization && tablesToInnerJoin.isEmpty() ) {
@@ -1376,6 +1387,30 @@ public void pruneForSubclasses(TableGroup tableGroup, Map<String, EntityNameUse>
}

final List<TableReferenceJoin> tableReferenceJoins = tableGroup.getTableReferenceJoins();
if ( needsTreatDiscriminator ) {
if ( tableReferenceJoins.isEmpty() ) {
// We need to apply the discriminator predicate to the primary table reference itself
final String discriminatorPredicate = getPrunedDiscriminatorPredicate( entityNameUses, metamodel, "t" );
if ( discriminatorPredicate != null ) {
final NamedTableReference tableReference = (NamedTableReference) tableGroup.getPrimaryTableReference();
tableReference.setPrunedTableExpression( "(select * from " + getRootTableName() + " t where " + discriminatorPredicate + ")" );
}
}
else {
// We have to apply the discriminator condition to the root table reference join
boolean applied = applyDiscriminatorPredicate(
tableReferenceJoins.get( 0 ),
(NamedTableReference) tableGroup.getPrimaryTableReference(),
entityNameUses,
metamodel
);
for ( int i = 0; !applied && i < tableReferenceJoins.size(); i++ ) {
final TableReferenceJoin join = tableReferenceJoins.get( i );
applied = applyDiscriminatorPredicate( join, join.getJoinedTableReference(), entityNameUses, metamodel );
}
assert applied : "Could not apply treat discriminator predicate to root table join";
}
}
if ( tableReferenceJoins.isEmpty() ) {
return;
}
@@ -1394,9 +1429,8 @@ public void pruneForSubclasses(TableGroup tableGroup, Map<String, EntityNameUse>
tableReferenceJoins.add( join );
}
else {
final String tableExpression = oldJoin.getJoinedTableReference().getTableExpression();
for ( int i = subclassCoreTableSpan; i < subclassTableNameClosure.length; i++ ) {
if ( tableExpression.equals( subclassTableNameClosure[i] ) ) {
if ( joinedTableReference.getTableExpression().equals( subclassTableNameClosure[i] ) ) {
// Retain joins to secondary tables
tableReferenceJoins.add( oldJoin );
break;
@@ -1410,6 +1444,24 @@ public void pruneForSubclasses(TableGroup tableGroup, Map<String, EntityNameUse>
}
}
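
To make the two branches above concrete, a hedged illustration of how the treat discriminator ends up in SQL (table, alias and column names are made up): when the table group has no table reference joins, the primary table reference is swapped for a filtered derived table; otherwise the predicate is attached to the inner join against the root table.

```java
// Case 1: no table reference joins -> pruned primary table expression, e.g.
String prunedPrimaryTable = "(select * from parent_table t where t.clazz_ in (1, 2))";

// Case 2: joins present -> a SqlFragmentPredicate on the root-table join, roughly:
//   ... inner join parent_table p1_0 on c1_0.id = p1_0.id and p1_0.clazz_ in (1, 2)
```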

private boolean applyDiscriminatorPredicate(
TableReferenceJoin join,
NamedTableReference tableReference,
Map<String, EntityNameUse> entityNameUses,
MappingMetamodelImplementor metamodel) {
if ( tableReference.getTableExpression().equals( getRootTableName() ) ) {
assert join.getJoinType() == SqlAstJoinType.INNER : "Found table reference join with root table of non-INNER type: " + join.getJoinType();
final String discriminatorPredicate = getPrunedDiscriminatorPredicate(
entityNameUses,
metamodel,
tableReference.getIdentificationVariable()
);
join.applyPredicate( new SqlFragmentPredicate( discriminatorPredicate ) );
return true;
}
return false;
}
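
The comments above note that multiple joined subclasses may share a table when an explicit discriminator is mapped; a hedged sketch of such a hierarchy (names are illustrative, and the classes are shown together for brevity):

```java
import jakarta.persistence.DiscriminatorColumn;
import jakarta.persistence.DiscriminatorValue;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Inheritance;
import jakarta.persistence.InheritanceType;
import jakarta.persistence.Table;

@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@DiscriminatorColumn(name = "clazz_")
@DiscriminatorValue("0")
class Parent {
	@Id
	Long id;
}

@Entity
@Table(name = "child_table")
@DiscriminatorValue("1")
class ChildOne extends Parent {
	int childProperty;
}

// Mapped to the same physical table as ChildOne; only the discriminator value
// distinguishes the rows, which is what the treat predicate built above filters on.
@Entity
@Table(name = "child_table")
@DiscriminatorValue("2")
class ChildTwo extends Parent {
	int otherProperty;
}
```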

@Override
public void visitConstraintOrderedTables(ConstraintOrderedTableConsumer consumer) {
for ( int i = 0; i < constraintOrderedTableNames.length; i++ ) {