
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.ComboSearchParamType; 023import ca.uhn.fhir.context.FhirContext; 024import ca.uhn.fhir.context.FhirVersionEnum; 025import ca.uhn.fhir.context.RuntimeResourceDefinition; 026import ca.uhn.fhir.context.RuntimeSearchParam; 027import ca.uhn.fhir.i18n.Msg; 028import ca.uhn.fhir.interceptor.api.HookParams; 029import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 030import ca.uhn.fhir.interceptor.api.Pointcut; 031import ca.uhn.fhir.interceptor.model.RequestPartitionId; 032import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 033import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 034import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 035import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 036import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 037import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 038import ca.uhn.fhir.jpa.dao.BaseStorageDao; 039import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 040import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 041import ca.uhn.fhir.jpa.dao.IResultIterator; 042import ca.uhn.fhir.jpa.dao.ISearchBuilder; 043import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 044import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 045import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 046import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 047import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 048import ca.uhn.fhir.jpa.model.config.PartitionSettings; 049import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 050import ca.uhn.fhir.jpa.model.dao.JpaPid; 051import ca.uhn.fhir.jpa.model.dao.JpaPidFk; 052import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 053import ca.uhn.fhir.jpa.model.entity.BaseTag; 054import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 055import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 056import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 057import ca.uhn.fhir.jpa.model.entity.ResourceLink; 058import ca.uhn.fhir.jpa.model.entity.ResourceTag; 059import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 060import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 061import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 062import ca.uhn.fhir.jpa.model.util.JpaConstants; 063import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 064import ca.uhn.fhir.jpa.search.SearchConstants; 065import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 066import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 067import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 069import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 070import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 
071import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 072import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 073import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 074import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 075import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 076import ca.uhn.fhir.jpa.util.BaseIterator; 077import ca.uhn.fhir.jpa.util.CartesianProductUtil; 078import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 079import ca.uhn.fhir.jpa.util.QueryChunker; 080import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; 081import ca.uhn.fhir.jpa.util.SqlQueryList; 082import ca.uhn.fhir.model.api.IQueryParameterType; 083import ca.uhn.fhir.model.api.Include; 084import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 085import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 086import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 087import ca.uhn.fhir.rest.api.Constants; 088import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 089import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 090import ca.uhn.fhir.rest.api.SortOrderEnum; 091import ca.uhn.fhir.rest.api.SortSpec; 092import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 093import ca.uhn.fhir.rest.api.server.RequestDetails; 094import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 095import ca.uhn.fhir.rest.param.DateParam; 096import ca.uhn.fhir.rest.param.DateRangeParam; 097import ca.uhn.fhir.rest.param.ParamPrefixEnum; 098import ca.uhn.fhir.rest.param.ParameterUtil; 099import ca.uhn.fhir.rest.param.ReferenceParam; 100import ca.uhn.fhir.rest.param.StringParam; 101import ca.uhn.fhir.rest.param.TokenParam; 102import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 103import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 104import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 105import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 106import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 107import ca.uhn.fhir.system.HapiSystemProperties; 108import ca.uhn.fhir.util.SearchParameterUtil; 109import ca.uhn.fhir.util.StopWatch; 110import ca.uhn.fhir.util.StringUtil; 111import ca.uhn.fhir.util.UrlUtil; 112import com.google.common.annotations.VisibleForTesting; 113import com.google.common.collect.ListMultimap; 114import com.google.common.collect.Lists; 115import com.google.common.collect.MultimapBuilder; 116import com.healthmarketscience.sqlbuilder.Condition; 117import jakarta.annotation.Nonnull; 118import jakarta.annotation.Nullable; 119import jakarta.persistence.EntityManager; 120import jakarta.persistence.PersistenceContext; 121import jakarta.persistence.PersistenceContextType; 122import jakarta.persistence.Query; 123import jakarta.persistence.Tuple; 124import jakarta.persistence.TypedQuery; 125import jakarta.persistence.criteria.CriteriaBuilder; 126import jakarta.persistence.criteria.CriteriaQuery; 127import jakarta.persistence.criteria.Predicate; 128import jakarta.persistence.criteria.Root; 129import jakarta.persistence.criteria.Selection; 130import org.apache.commons.collections4.ListUtils; 131import org.apache.commons.lang3.StringUtils; 132import org.apache.commons.lang3.Validate; 133import org.apache.commons.lang3.math.NumberUtils; 134import org.apache.commons.lang3.tuple.Pair; 135import org.hibernate.ScrollMode; 136import org.hibernate.ScrollableResults; 137import org.hl7.fhir.instance.model.api.IAnyResource; 138import org.hl7.fhir.instance.model.api.IBaseResource; 139import org.hl7.fhir.instance.model.api.IIdType; 140import 
org.slf4j.Logger; 141import org.slf4j.LoggerFactory; 142import org.springframework.beans.factory.annotation.Autowired; 143import org.springframework.jdbc.core.JdbcTemplate; 144import org.springframework.transaction.support.TransactionSynchronizationManager; 145 146import java.util.ArrayList; 147import java.util.Arrays; 148import java.util.Collection; 149import java.util.Collections; 150import java.util.Comparator; 151import java.util.HashMap; 152import java.util.HashSet; 153import java.util.Iterator; 154import java.util.LinkedList; 155import java.util.List; 156import java.util.Map; 157import java.util.Objects; 158import java.util.Optional; 159import java.util.Set; 160import java.util.stream.Collectors; 161 162import static ca.uhn.fhir.jpa.model.util.JpaConstants.NO_MORE; 163import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 164import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 165import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 166import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 167import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 168import static java.util.Objects.requireNonNull; 169import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 170import static org.apache.commons.lang3.StringUtils.isBlank; 171import static org.apache.commons.lang3.StringUtils.isNotBlank; 172import static org.apache.commons.lang3.StringUtils.stripStart; 173 174/** 175 * The SearchBuilder is responsible for actually forming the SQL query that handles 176 * searches for resources 177 */ 178public class SearchBuilder implements ISearchBuilder<JpaPid> { 179 180 /** 181 * See loadResourcesByPid 182 * for an explanation of why we use the constant 800 183 */ 184 // NB: keep public 185 @Deprecated 186 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 187 188 public static final String RESOURCE_ID_ALIAS = "resource_id"; 189 public static final String PARTITION_ID_ALIAS = "partition_id"; 190 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 191 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 192 193 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 194 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 195 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 196 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 197 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 198 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 199 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 200 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 201 public static Integer myMaxPageSizeForTests = null; 202 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 203 protected final IResourceTagDao myResourceTagDao; 204 private String myResourceName; 205 private final Class<? 
extends IBaseResource> myResourceType; 206 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 207 private final SqlObjectFactory mySqlBuilderFactory; 208 private final HibernatePropertiesProvider myDialectProvider; 209 private final ISearchParamRegistry mySearchParamRegistry; 210 private final PartitionSettings myPartitionSettings; 211 private final DaoRegistry myDaoRegistry; 212 private final FhirContext myContext; 213 private final IIdHelperService<JpaPid> myIdHelperService; 214 private final JpaStorageSettings myStorageSettings; 215 private final SearchQueryProperties mySearchProperties; 216 private final IResourceHistoryTableDao myResourceHistoryTableDao; 217 private final IJpaStorageResourceParser myJpaStorageResourceParser; 218 219 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 220 protected EntityManager myEntityManager; 221 222 private CriteriaBuilder myCriteriaBuilder; 223 private SearchParameterMap myParams; 224 private String mySearchUuid; 225 private int myFetchSize; 226 227 private boolean myRequiresTotal; 228 229 /** 230 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 231 */ 232 private Set<JpaPid> myPidSet; 233 234 private boolean myHasNextIteratorQuery = false; 235 private RequestPartitionId myRequestPartitionId; 236 237 private IFulltextSearchSvc myFulltextSearchSvc; 238 239 @Autowired(required = false) 240 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 241 myFulltextSearchSvc = theFulltextSearchSvc; 242 } 243 244 @Autowired(required = false) 245 private IElasticsearchSvc myIElasticsearchSvc; 246 247 @Autowired 248 private IResourceHistoryTagDao myResourceHistoryTagDao; 249 250 @Autowired 251 private IRequestPartitionHelperSvc myPartitionHelperSvc; 252 253 /** 254 * Constructor 255 */ 256 @SuppressWarnings({"rawtypes", "unchecked"}) 257 public SearchBuilder( 258 String theResourceName, 259 JpaStorageSettings theStorageSettings, 260 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 261 SqlObjectFactory theSqlBuilderFactory, 262 HibernatePropertiesProvider theDialectProvider, 263 ISearchParamRegistry theSearchParamRegistry, 264 PartitionSettings thePartitionSettings, 265 IInterceptorBroadcaster theInterceptorBroadcaster, 266 IResourceTagDao theResourceTagDao, 267 DaoRegistry theDaoRegistry, 268 FhirContext theContext, 269 IIdHelperService theIdHelperService, 270 IResourceHistoryTableDao theResourceHistoryTagDao, 271 IJpaStorageResourceParser theIJpaStorageResourceParser, 272 Class<? 
extends IBaseResource> theResourceType) { 273 myResourceName = theResourceName; 274 myResourceType = theResourceType; 275 myStorageSettings = theStorageSettings; 276 277 myEntityManagerFactory = theEntityManagerFactory; 278 mySqlBuilderFactory = theSqlBuilderFactory; 279 myDialectProvider = theDialectProvider; 280 mySearchParamRegistry = theSearchParamRegistry; 281 myPartitionSettings = thePartitionSettings; 282 myInterceptorBroadcaster = theInterceptorBroadcaster; 283 myResourceTagDao = theResourceTagDao; 284 myDaoRegistry = theDaoRegistry; 285 myContext = theContext; 286 myIdHelperService = theIdHelperService; 287 myResourceHistoryTableDao = theResourceHistoryTagDao; 288 myJpaStorageResourceParser = theIJpaStorageResourceParser; 289 290 mySearchProperties = new SearchQueryProperties(); 291 } 292 293 @VisibleForTesting 294 void setResourceName(String theName) { 295 myResourceName = theName; 296 } 297 298 @Override 299 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 300 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 301 } 302 303 @Override 304 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 305 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 306 } 307 308 @Override 309 public void setRequireTotal(boolean theRequireTotal) { 310 myRequiresTotal = theRequireTotal; 311 } 312 313 @Override 314 public boolean requiresTotal() { 315 return myRequiresTotal; 316 } 317 318 private void searchForIdsWithAndOr( 319 SearchQueryBuilder theSearchSqlBuilder, 320 QueryStack theQueryStack, 321 @Nonnull SearchParameterMap theParams, 322 RequestDetails theRequest) { 323 myParams = theParams; 324 mySearchProperties.setSortSpec(myParams.getSort()); 325 326 // Remove any empty parameters 327 theParams.clean(); 328 329 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 330 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 331 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 332 } 333 334 // Attempt to lookup via composite unique key. 
335 if (isCompositeUniqueSpCandidate()) { 336 attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest); 337 } 338 339 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 340 List<String> paramNames = myParams.keySet().stream() 341 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 342 .filter(t -> !t.equals(Constants.PARAM_TAG)) 343 .collect(Collectors.toList()); 344 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 345 paramNames.add(IAnyResource.SP_RES_ID); 346 } 347 if (myParams.containsKey(Constants.PARAM_TAG)) { 348 paramNames.add(Constants.PARAM_TAG); 349 } 350 351 // Handle each parameter 352 for (String nextParamName : paramNames) { 353 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 354 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 355 // Elasticsearch 356 continue; 357 } 358 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 359 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 360 .setParamName(nextParamName) 361 .setAndOrParams(andOrParams) 362 .setRequest(theRequest) 363 .setRequestPartitionId(myRequestPartitionId) 364 .setIncludeDeleted(myParams.getSearchIncludeDeletedMode())); 365 if (predicate != null) { 366 theSearchSqlBuilder.addPredicate(predicate); 367 } 368 } 369 } 370 371 /** 372 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the 373 * parameters all have no modifiers. 374 */ 375 private boolean isCompositeUniqueSpCandidate() { 376 return myStorageSettings.isUniqueIndexesEnabled() 377 && myParams.getEverythingMode() == null 378 && myResourceName != null; 379 } 380 381 @SuppressWarnings("ConstantConditions") 382 @Override 383 public Long createCountQuery( 384 SearchParameterMap theParams, 385 String theSearchUuid, 386 RequestDetails theRequest, 387 @Nonnull RequestPartitionId theRequestPartitionId) { 388 389 assert theRequestPartitionId != null; 390 assert TransactionSynchronizationManager.isActualTransactionActive(); 391 392 init(theParams, theSearchUuid, theRequestPartitionId); 393 394 if (checkUseHibernateSearch()) { 395 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 396 } 397 398 SearchQueryProperties properties = mySearchProperties.clone(); 399 properties.setDoCountOnlyFlag(true); 400 properties.setSortSpec(null); // counts don't require sorts 401 properties.setMaxResultsRequested(null); 402 properties.setOffset(null); 403 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 404 if (queries.isEmpty()) { 405 return 0L; 406 } else { 407 JpaPid jpaPid = queries.get(0).next(); 408 return jpaPid.getId(); 409 } 410 } 411 412 /** 413 * @param thePidSet May be null 414 */ 415 @Override 416 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 417 myPidSet = new HashSet<>(thePidSet); 418 } 419 420 @SuppressWarnings("ConstantConditions") 421 @Override 422 public IResultIterator<JpaPid> createQuery( 423 SearchParameterMap theParams, 424 SearchRuntimeDetails theSearchRuntimeDetails, 425 RequestDetails theRequest, 426 @Nonnull RequestPartitionId theRequestPartitionId) { 427 assert theRequestPartitionId != null; 428 assert TransactionSynchronizationManager.isActualTransactionActive(); 429 430 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 431 432 if (myPidSet == null) { 433 
myPidSet = new HashSet<>(); 434 } 435 436 return new QueryIterator(theSearchRuntimeDetails, theRequest); 437 } 438 439 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 440 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 441 // we mutate the params. Make a private copy. 442 myParams = theParams.clone(); 443 mySearchProperties.setSortSpec(myParams.getSort()); 444 mySearchUuid = theSearchUuid; 445 myRequestPartitionId = theRequestPartitionId; 446 } 447 448 /** 449 * The query created can be either a count query or the 450 * actual query. 451 * This is why it takes a SearchQueryProperties object 452 * (and doesn't use the local version of it). 453 * The properties may differ slightly for whichever 454 * query this is. 455 */ 456 private List<ISearchQueryExecutor> createQuery( 457 SearchParameterMap theParams, 458 SearchQueryProperties theSearchProperties, 459 RequestDetails theRequest, 460 SearchRuntimeDetails theSearchRuntimeDetails) { 461 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 462 463 if (checkUseHibernateSearch()) { 464 // we're going to run at least part of the search against the Fulltext service. 465 466 // Ugh - we have two different return types for now 467 ISearchQueryExecutor fulltextExecutor = null; 468 List<JpaPid> fulltextMatchIds = null; 469 int resultCount = 0; 470 if (myParams.isLastN()) { 471 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 472 resultCount = fulltextMatchIds.size(); 473 } else if (myParams.getEverythingMode() != null) { 474 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 475 resultCount = fulltextMatchIds.size(); 476 } else { 477 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 478 // enabled SP indexing). 479 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 480 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 481 // extra work in Elastic. 482 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 483 484 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 485 // a page. 486 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 487 } 488 489 if (fulltextExecutor == null) { 490 fulltextExecutor = 491 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 492 } 493 494 if (theSearchRuntimeDetails != null) { 495 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 496 IInterceptorBroadcaster compositeBroadcaster = 497 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 498 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 499 HookParams params = new HookParams() 500 .add(RequestDetails.class, theRequest) 501 .addIfMatchesType(ServletRequestDetails.class, theRequest) 502 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 503 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 504 } 505 } 506 507 // can we skip the database entirely and return the pid list from here? 508 boolean canSkipDatabase = 509 // if we processed an AND clause, and it returned nothing, then nothing can match. 
510 !fulltextExecutor.hasNext() 511 || 512 // Our hibernate search query doesn't respect partitions yet 513 (!myPartitionSettings.isPartitioningEnabled() 514 && 515 // were there AND terms left? Then we still need the db. 516 theParams.isEmpty() 517 && 518 // not every param is a param. :-( 519 theParams.getNearDistanceParam() == null 520 && 521 // todo MB don't we support _lastUpdated and _offset now? 522 theParams.getLastUpdated() == null 523 && theParams.getEverythingMode() == null 524 && theParams.getOffset() == null); 525 526 if (canSkipDatabase) { 527 ourLog.trace("Query finished after HSearch. Skip db query phase"); 528 if (theSearchProperties.hasMaxResultsRequested()) { 529 fulltextExecutor = SearchQueryExecutors.limited( 530 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 531 } 532 queries.add(fulltextExecutor); 533 } else { 534 ourLog.trace("Query needs db after HSearch. Chunking."); 535 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 536 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 537 QueryChunker.chunk( 538 fulltextExecutor, 539 SearchBuilder.getMaximumPageSize(), 540 // for each list of (SearchBuilder.getMaximumPageSize()) 541 // we create a chunked query and add it to 'queries' 542 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 543 } 544 } else { 545 // do everything in the database. 546 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 547 } 548 549 return queries; 550 } 551 552 /** 553 * Check to see if query should use Hibernate Search, and error if the query can't continue. 554 * 555 * @return true if the query should first be processed by Hibernate Search 556 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 557 */ 558 private boolean checkUseHibernateSearch() { 559 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 560 561 if (!fulltextEnabled) { 562 failIfUsed(Constants.PARAM_TEXT); 563 failIfUsed(Constants.PARAM_CONTENT); 564 } else { 565 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 566 final String paramName = sortSpec.getParamName(); 567 if (paramName.contains(".")) { 568 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 569 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 570 } 571 } 572 } 573 574 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 575 // can. 
576 return fulltextEnabled 577 && myParams != null 578 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 579 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 580 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 581 } 582 583 private void failIfUsed(String theParamName) { 584 if (myParams.containsKey(theParamName)) { 585 throw new InvalidRequestException(Msg.code(1192) 586 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 587 } 588 } 589 590 private void failIfUsedWithChainedSort(String theParamName) { 591 if (myParams.containsKey(theParamName)) { 592 throw new InvalidRequestException(Msg.code(2524) 593 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 594 + theParamName); 595 } 596 } 597 598 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 599 // Can we use our hibernate search generated index on resource to support lastN?: 600 if (myStorageSettings.isHibernateSearchIndexSearchParams()) { 601 if (myFulltextSearchSvc == null) { 602 throw new InvalidRequestException(Msg.code(2027) 603 + "LastN operation is not enabled on this service, can not process this request"); 604 } 605 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 606 .map(t -> (JpaPid) t) 607 .collect(Collectors.toList()); 608 } else { 609 throw new InvalidRequestException( 610 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 611 } 612 } 613 614 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 615 JpaPid pid = null; 616 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 617 String idParamValue; 618 IQueryParameterType idParam = 619 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 620 if (idParam instanceof TokenParam idParm) { 621 idParamValue = idParm.getValue(); 622 } else { 623 StringParam idParm = (StringParam) idParam; 624 idParamValue = idParm.getValue(); 625 } 626 627 pid = myIdHelperService 628 .resolveResourceIdentity( 629 myRequestPartitionId, 630 myResourceName, 631 idParamValue, 632 ResolveIdentityMode.includeDeleted().cacheOk()) 633 .getPersistentId(); 634 } 635 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 636 } 637 638 private void doCreateChunkedQueries( 639 SearchParameterMap theParams, 640 List<JpaPid> thePids, 641 SearchQueryProperties theSearchQueryProperties, 642 RequestDetails theRequest, 643 ArrayList<ISearchQueryExecutor> theQueries) { 644 645 if (thePids.size() < getMaximumPageSize()) { 646 thePids = normalizeIdListForInClause(thePids); 647 } 648 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 649 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 650 } 651 652 /** 653 * Combs through the params for any _id parameters and extracts the PIDs for them 654 */ 655 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 656 // get all the IQueryParameterType objects 657 // for _id -> these should all be StringParam values 658 HashSet<IIdType> ids = new HashSet<>(); 659 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 660 for (List<IQueryParameterType> paramList : params) { 661 for (IQueryParameterType param : paramList) { 662 String id; 663 if (param instanceof StringParam) { 664 // we expect all _id values to be StringParams 665 id = 
((StringParam) param).getValue(); 666 } else if (param instanceof TokenParam) { 667 id = ((TokenParam) param).getValue(); 668 } else { 669 // we do not expect the _id parameter to be a non-string value 670 throw new IllegalArgumentException( 671 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 672 } 673 674 IIdType idType = myContext.getVersion().newIdType(); 675 if (id.contains("/")) { 676 idType.setValue(id); 677 } else { 678 idType.setValue(myResourceName + "/" + id); 679 } 680 ids.add(idType); 681 } 682 } 683 684 // fetch our target Pids 685 // this will throw if an id is not found 686 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 687 myRequestPartitionId, 688 new ArrayList<>(ids), 689 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 690 691 // add the pids to targetPids 692 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 693 theTargetPids.add(pid.getPersistentId()); 694 } 695 } 696 697 private void createChunkedQuery( 698 SearchParameterMap theParams, 699 SearchQueryProperties theSearchProperties, 700 RequestDetails theRequest, 701 List<JpaPid> thePidList, 702 List<ISearchQueryExecutor> theSearchQueryExecutors) { 703 if (myParams.getEverythingMode() != null) { 704 createChunkedQueryForEverythingSearch( 705 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 706 } else { 707 createChunkedQueryNormalSearch( 708 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 709 } 710 } 711 712 private void createChunkedQueryNormalSearch( 713 SearchParameterMap theParams, 714 SearchQueryProperties theSearchProperties, 715 RequestDetails theRequest, 716 List<JpaPid> thePidList, 717 List<ISearchQueryExecutor> theSearchQueryExecutors) { 718 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 719 myContext, 720 myStorageSettings, 721 myPartitionSettings, 722 myRequestPartitionId, 723 myResourceName, 724 mySqlBuilderFactory, 725 myDialectProvider, 726 theSearchProperties.isDoCountOnlyFlag(), 727 myResourceName == null || myResourceName.isBlank()); 728 QueryStack queryStack3 = new QueryStack( 729 theRequest, 730 theParams, 731 myStorageSettings, 732 myContext, 733 sqlBuilder, 734 mySearchParamRegistry, 735 myPartitionSettings); 736 737 if (theParams.keySet().size() > 1 738 || theParams.getSort() != null 739 || theParams.keySet().contains(Constants.PARAM_HAS) 740 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 741 List<RuntimeSearchParam> activeComboParams = List.of(); 742 if (myResourceName != null) { 743 activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 744 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 745 } 746 if (activeComboParams.isEmpty()) { 747 sqlBuilder.setNeedResourceTableRoot(true); 748 } 749 } 750 751 /* 752 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 753 * specific filters with ORs as their root from working around the natural resource type / deletion 754 * status / partition IDs built into queries. 
755 */ 756 if (theParams.containsKey(Constants.PARAM_FILTER)) { 757 Condition partitionIdPredicate = sqlBuilder 758 .getOrCreateResourceTablePredicateBuilder() 759 .createPartitionIdPredicate(myRequestPartitionId); 760 if (partitionIdPredicate != null) { 761 sqlBuilder.addPredicate(partitionIdPredicate); 762 } 763 } 764 765 // Normal search 766 // we will create a resourceTablePredicate if and only if we have an _id SP. 767 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 768 769 // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the 770 // partition ID predicate in that case. 771 if (!sqlBuilder.haveAtLeastOnePredicate()) { 772 Condition partitionIdPredicate; 773 774 if (theParams.getSearchIncludeDeletedMode() != null) { 775 partitionIdPredicate = sqlBuilder 776 .getOrCreateResourceTablePredicateBuilder(true, theParams.getSearchIncludeDeletedMode()) 777 .createPartitionIdPredicate(myRequestPartitionId); 778 } else { 779 partitionIdPredicate = sqlBuilder 780 .getOrCreateResourceTablePredicateBuilder() 781 .createPartitionIdPredicate(myRequestPartitionId); 782 } 783 784 if (partitionIdPredicate != null) { 785 sqlBuilder.addPredicate(partitionIdPredicate); 786 } 787 } 788 789 // Add PID list predicate for full text search and/or lastn operation 790 addPidListPredicate(thePidList, sqlBuilder); 791 792 // Last updated 793 addLastUpdatePredicate(sqlBuilder); 794 795 /* 796 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 797 * to something needed to guarantee correct results. 798 * 799 * Why do we need it? Suppose for example, a query like: 800 * Observation?category=foo,bar,baz 801 * And suppose you have many resources that have all 3 of these category codes. In this case 802 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 803 * we may exhaust the query results without getting enough distinct results back. When that 804 * happens we re-run the query with a larger limit. Excluding results we already know about 805 * tries to ensure that we get new unique results. 806 * 807 * The challenge with that though is that lots of DBs have an issue with too many 808 * parameters in one query. So we only do this optimization if there aren't too 809 * many results. 
810 */ 811 if (myHasNextIteratorQuery) { 812 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 813 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 814 } 815 } 816 817 /* 818 * If offset is present, we want to deduplicate the results by using GROUP BY; 819 * OR 820 * if the MaxResultsToFetch is null, we are requesting "everything", 821 * so we'll let the db do the deduplication (instead of in-memory) 822 */ 823 if (theSearchProperties.isDeduplicateInDatabase()) { 824 queryStack3.addGrouping(); 825 queryStack3.setUseAggregate(true); 826 } 827 828 /* 829 * Sort 830 * 831 * If we have a sort, we wrap the criteria search (the search that actually 832 * finds the appropriate resources) in an outer search which is then sorted 833 */ 834 if (theSearchProperties.hasSort()) { 835 assert !theSearchProperties.isDoCountOnlyFlag(); 836 837 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 838 } 839 840 /* 841 * Now perform the search 842 */ 843 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 844 } 845 846 private void executeSearch( 847 SearchQueryProperties theProperties, 848 List<ISearchQueryExecutor> theSearchQueryExecutors, 849 SearchQueryBuilder sqlBuilder) { 850 GeneratedSql generatedSql = 851 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 852 if (!generatedSql.isMatchNothing()) { 853 SearchQueryExecutor executor = 854 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 855 theSearchQueryExecutors.add(executor); 856 } 857 } 858 859 private void createChunkedQueryForEverythingSearch( 860 RequestDetails theRequest, 861 SearchParameterMap theParams, 862 SearchQueryProperties theSearchQueryProperties, 863 List<JpaPid> thePidList, 864 List<ISearchQueryExecutor> theSearchQueryExecutors) { 865 866 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 867 myContext, 868 myStorageSettings, 869 myPartitionSettings, 870 myRequestPartitionId, 871 null, 872 mySqlBuilderFactory, 873 myDialectProvider, 874 theSearchQueryProperties.isDoCountOnlyFlag(), 875 false); 876 877 QueryStack queryStack3 = new QueryStack( 878 theRequest, 879 theParams, 880 myStorageSettings, 881 myContext, 882 sqlBuilder, 883 mySearchParamRegistry, 884 myPartitionSettings); 885 886 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 887 888 Set<JpaPid> targetPids = new HashSet<>(); 889 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 890 891 extractTargetPidsFromIdParams(targetPids); 892 893 // add the target pids to our executors as the first 894 // results iterator to go through 895 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 896 } else { 897 // For Everything queries, we make the query root by the ResourceLink table, since this query 898 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 899 // the one problem with this approach is that it doesn't catch Patients that have absolutely 900 // nothing linked to them. So we do one additional query to make sure we catch those too. 
901 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 902 myContext, 903 myStorageSettings, 904 myPartitionSettings, 905 myRequestPartitionId, 906 myResourceName, 907 mySqlBuilderFactory, 908 myDialectProvider, 909 theSearchQueryProperties.isDoCountOnlyFlag(), 910 false); 911 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 912 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 913 String sql = allTargetsSql.getSql(); 914 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 915 916 List<JpaPid> output = 917 jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args); 918 919 // we add a search executor to fetch unlinked patients first 920 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 921 } 922 923 List<String> typeSourceResources = new ArrayList<>(); 924 if (myParams.get(Constants.PARAM_TYPE) != null) { 925 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 926 } 927 928 queryStack3.addPredicateEverythingOperation( 929 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 930 931 // Add PID list predicate for full text search and/or lastn operation 932 addPidListPredicate(thePidList, sqlBuilder); 933 934 /* 935 * If offset is present, we want deduplicate the results by using GROUP BY 936 * ORDER BY is required to make sure we return unique results for each page 937 */ 938 if (theSearchQueryProperties.hasOffset()) { 939 queryStack3.addGrouping(); 940 queryStack3.addOrdering(); 941 queryStack3.setUseAggregate(true); 942 } 943 944 if (myParams.getEverythingMode().isPatient()) { 945 /* 946 * NB: patient-compartment limitation 947 * 948 * We are manually excluding Group and List resources 949 * from the patient-compartment for $everything operations on Patient type/instance. 
950 * 951 * See issue: https://github.com/hapifhir/hapi-fhir/issues/7118 952 */ 953 sqlBuilder.excludeResourceTypesPredicate( 954 SearchParameterUtil.RESOURCE_TYPES_TO_SP_TO_OMIT_FROM_PATIENT_COMPARTMENT.keySet()); 955 } 956 957 /* 958 * Now perform the search 959 */ 960 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 961 } 962 963 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 964 if (thePidList != null && !thePidList.isEmpty()) { 965 theSqlBuilder.addResourceIdsPredicate(thePidList); 966 } 967 } 968 969 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 970 DateRangeParam lu = myParams.getLastUpdated(); 971 if (lu != null && !lu.isEmpty()) { 972 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 973 theSqlBuilder.addPredicate(lastUpdatedPredicates); 974 } 975 } 976 977 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 978 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 979 jdbcTemplate.setFetchSize(myFetchSize); 980 if (theMaximumResults != null) { 981 jdbcTemplate.setMaxRows(theMaximumResults); 982 } 983 return jdbcTemplate; 984 } 985 986 private Collection<String> extractTypeSourceResourcesFromParams() { 987 988 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 989 990 // first off, let's flatten the list of list 991 List<IQueryParameterType> iQueryParameterTypesList = 992 listOfList.stream().flatMap(List::stream).toList(); 993 994 // then, extract all elements of each CSV into one big list 995 List<String> resourceTypes = iQueryParameterTypesList.stream() 996 .map(param -> ((StringParam) param).getValue()) 997 .map(csvString -> List.of(csvString.split(","))) 998 .flatMap(List::stream) 999 .toList(); 1000 1001 Set<String> knownResourceTypes = myContext.getResourceTypes(); 1002 1003 // remove leading/trailing whitespaces if any and remove duplicates 1004 Set<String> retVal = new HashSet<>(); 1005 1006 for (String type : resourceTypes) { 1007 String trimmed = type.trim(); 1008 if (!knownResourceTypes.contains(trimmed)) { 1009 throw new ResourceNotFoundException( 1010 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 1011 } 1012 retVal.add(trimmed); 1013 } 1014 1015 return retVal; 1016 } 1017 1018 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 1019 return myStorageSettings.isIndexOnContainedResources() 1020 && theParams.values().stream() 1021 .flatMap(Collection::stream) 1022 .flatMap(Collection::stream) 1023 .anyMatch(ReferenceParam.class::isInstance); 1024 } 1025 1026 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 1027 if (theSort == null || isBlank(theSort.getParamName())) { 1028 return; 1029 } 1030 1031 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 1032 1033 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 1034 1035 theQueryStack.addSortOnResourceId(ascending); 1036 1037 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 1038 1039 theQueryStack.addSortOnResourcePID(ascending); 1040 1041 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1042 1043 theQueryStack.addSortOnLastUpdated(ascending); 1044 1045 } else { 1046 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1047 myResourceName, theSort.getParamName(), 
ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1048 1049 /* 1050 * If we have a sort like _sort=subject.name and we have an 1051 * uplifted refchain for that combination we can do it more efficiently 1052 * by using the index associated with the uplifted refchain. In this case, 1053 * we need to find the actual target search parameter (corresponding 1054 * to "name" in this example) so that we know what datatype it is. 1055 */ 1056 String paramName = theSort.getParamName(); 1057 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1058 String[] chains = StringUtils.split(paramName, '.'); 1059 if (chains.length == 2) { 1060 1061 // Given: Encounter?_sort=Patient:subject.name 1062 String referenceParam = chains[0]; // subject 1063 String referenceParamTargetType = null; // Patient 1064 String targetParam = chains[1]; // name 1065 1066 int colonIdx = referenceParam.indexOf(':'); 1067 if (colonIdx > -1) { 1068 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1069 referenceParam = referenceParam.substring(colonIdx + 1); 1070 } 1071 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1072 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1073 if (outerParam == null) { 1074 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1075 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1076 for (String nextTargetType : outerParam.getTargets()) { 1077 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1078 continue; 1079 } 1080 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1081 nextTargetType, 1082 targetParam, 1083 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1084 if (innerParam != null) { 1085 param = innerParam; 1086 break; 1087 } 1088 } 1089 } 1090 } 1091 } 1092 1093 int colonIdx = paramName.indexOf(':'); 1094 String referenceTargetType = null; 1095 if (colonIdx > -1) { 1096 referenceTargetType = paramName.substring(0, colonIdx); 1097 paramName = paramName.substring(colonIdx + 1); 1098 } 1099 1100 int dotIdx = paramName.indexOf('.'); 1101 String chainName = null; 1102 if (param == null && dotIdx > -1) { 1103 chainName = paramName.substring(dotIdx + 1); 1104 paramName = paramName.substring(0, dotIdx); 1105 if (chainName.contains(".")) { 1106 String msg = myContext 1107 .getLocalizer() 1108 .getMessageSanitized( 1109 BaseStorageDao.class, 1110 "invalidSortParameterTooManyChains", 1111 paramName + "." 
+ chainName); 1112 throw new InvalidRequestException(Msg.code(2286) + msg); 1113 } 1114 } 1115 1116 if (param == null) { 1117 param = mySearchParamRegistry.getActiveSearchParam( 1118 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1119 } 1120 1121 if (param == null) { 1122 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1123 } 1124 1125 // param will never be null here (the above line throws if it does) 1126 // this is just to prevent the warning 1127 assert param != null; 1128 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1129 throw new InvalidRequestException( 1130 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1131 } 1132 1133 switch (param.getParamType()) { 1134 case STRING: 1135 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1136 break; 1137 case DATE: 1138 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1139 break; 1140 case REFERENCE: 1141 theQueryStack.addSortOnResourceLink( 1142 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1143 break; 1144 case TOKEN: 1145 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1146 break; 1147 case NUMBER: 1148 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1149 break; 1150 case URI: 1151 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1152 break; 1153 case QUANTITY: 1154 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1155 break; 1156 case COMPOSITE: 1157 List<RuntimeSearchParam> compositeList = 1158 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); 1159 if (compositeList == null) { 1160 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1161 + " is not defined by the resource " + myResourceName); 1162 } 1163 if (compositeList.size() != 2) { 1164 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1165 + " must have 2 composite types declared in parameter annotation, found " 1166 + compositeList.size()); 1167 } 1168 RuntimeSearchParam left = compositeList.get(0); 1169 RuntimeSearchParam right = compositeList.get(1); 1170 1171 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1172 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1173 1174 break; 1175 case SPECIAL: 1176 if (LOCATION_POSITION.equals(param.getPath())) { 1177 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1178 break; 1179 } 1180 throw new InvalidRequestException( 1181 Msg.code(2306) + "This server does not support _sort specifications of type " 1182 + param.getParamType() + " - Can't serve _sort=" + paramName); 1183 1184 case HAS: 1185 default: 1186 throw new InvalidRequestException( 1187 Msg.code(1197) + "This server does not support _sort specifications of type " 1188 + param.getParamType() + " - Can't serve _sort=" + paramName); 1189 } 1190 } 1191 1192 // Recurse 1193 createSort(theQueryStack, theSort.getChain(), theParams); 1194 } 1195 1196 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1197 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1198 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1199 String msg = myContext 
1200 .getLocalizer() 1201 .getMessageSanitized( 1202 BaseStorageDao.class, 1203 "invalidSortParameter", 1204 theParamName, 1205 theResourceName, 1206 validSearchParameterNames); 1207 throw new InvalidRequestException(Msg.code(1194) + msg); 1208 } 1209 1210 private void createCompositeSort( 1211 QueryStack theQueryStack, 1212 RestSearchParameterTypeEnum theParamType, 1213 String theParamName, 1214 boolean theAscending) { 1215 1216 switch (theParamType) { 1217 case STRING: 1218 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1219 break; 1220 case DATE: 1221 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1222 break; 1223 case TOKEN: 1224 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1225 break; 1226 case QUANTITY: 1227 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1228 break; 1229 case NUMBER: 1230 case REFERENCE: 1231 case COMPOSITE: 1232 case URI: 1233 case HAS: 1234 case SPECIAL: 1235 default: 1236 throw new InvalidRequestException( 1237 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1238 + " on _sort=" + theParamName); 1239 } 1240 } 1241 1242 private void doLoadPids( 1243 RequestDetails theRequest, 1244 Collection<JpaPid> thePids, 1245 Collection<JpaPid> theIncludedPids, 1246 List<IBaseResource> theResourceListToPopulate, 1247 boolean theForHistoryOperation, 1248 Map<Long, Integer> thePosition) { 1249 1250 Map<JpaPid, Long> resourcePidToVersion = null; 1251 for (JpaPid next : thePids) { 1252 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1253 if (resourcePidToVersion == null) { 1254 resourcePidToVersion = new HashMap<>(); 1255 } 1256 resourcePidToVersion.put(next, next.getVersion()); 1257 } 1258 } 1259 1260 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1261 int expectedCount = versionlessPids.size(); 1262 if (versionlessPids.size() < getMaximumPageSize()) { 1263 /* 1264 * This method adds a bunch of extra params to the end of the parameter list 1265 * which are for a resource PID that will never exist (-1 / NO_MORE). We do this 1266 * so that the database can rely on a cached execution plan since we're not 1267 * generating a new SQL query for every possible number of resources. 1268 */ 1269 versionlessPids = normalizeIdListForInClause(versionlessPids); 1270 } 1271 1272 // Load the resource bodies 1273 List<JpaPidFk> historyVersionPks = JpaPidFk.fromPids(versionlessPids); 1274 List<ResourceHistoryTable> resourceSearchViewList = 1275 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(historyVersionPks); 1276 1277 /* 1278 * If we have specific versions to load, replace the history entries with the 1279 * correct ones 1280 * 1281 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1282 * version entity first, and by batching the fetches. 
But this is a fairly infrequently 1283 * used feature, and loading history entities by PK is a very efficient query so it's 1284 * not the end of the world 1285 */ 1286 if (resourcePidToVersion != null) { 1287 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1288 ResourceHistoryTable next = resourceSearchViewList.get(i); 1289 JpaPid resourceId = next.getPersistentId(); 1290 Long version = resourcePidToVersion.get(resourceId); 1291 resourceId.setVersion(version); 1292 if (version != null && !version.equals(next.getVersion())) { 1293 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1294 next.getResourceId().toFk(), version); 1295 resourceSearchViewList.set(i, replacement); 1296 } 1297 } 1298 } 1299 1300 /* 1301 * If we got fewer rows back than we expected, that means that one or more ResourceTable 1302 * entities (HFJ_RESOURCE) have a RES_VER version which doesn't exist in the 1303 * ResourceHistoryTable (HFJ_RES_VER) table. This should never happen under normal 1304 * operation, but if someone manually deletes a row or otherwise ends up in a weird 1305 * state it can happen. In that case, we do a manual process of figuring out what 1306 * is the right version. 1307 */ 1308 if (resourceSearchViewList.size() != expectedCount) { 1309 1310 Set<JpaPid> loadedPks = resourceSearchViewList.stream() 1311 .map(ResourceHistoryTable::getResourceId) 1312 .collect(Collectors.toSet()); 1313 for (JpaPid nextWantedPid : versionlessPids) { 1314 if (!nextWantedPid.equals(NO_MORE) && !loadedPks.contains(nextWantedPid)) { 1315 Optional<ResourceHistoryTable> latestVersion = findLatestVersion( 1316 theRequest, nextWantedPid, myResourceHistoryTableDao, myInterceptorBroadcaster); 1317 latestVersion.ifPresent(resourceSearchViewList::add); 1318 } 1319 } 1320 } 1321 1322 // -- preload all tags with tag definition if any 1323 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1324 1325 for (ResourceHistoryTable next : resourceSearchViewList) { 1326 if (next.getDeleted() != null) { 1327 continue; 1328 } 1329 1330 Class<? 
extends IBaseResource> resourceType = 1331 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1332 1333 JpaPid resourceId = next.getPersistentId(); 1334 1335 if (resourcePidToVersion != null) { 1336 Long version = resourcePidToVersion.get(resourceId); 1337 resourceId.setVersion(version); 1338 } 1339 1340 IBaseResource resource; 1341 resource = myJpaStorageResourceParser.toResource( 1342 theRequest, resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1343 if (resource == null) { 1344 ourLog.warn( 1345 "Unable to find resource {}/{}/_history/{} in database", 1346 next.getResourceType(), 1347 next.getIdDt().getIdPart(), 1348 next.getVersion()); 1349 continue; 1350 } 1351 1352 Integer index = thePosition.get(resourceId.getId()); 1353 if (index == null) { 1354 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1355 continue; 1356 } 1357 1358 if (theIncludedPids.contains(resourceId)) { 1359 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1360 } else { 1361 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1362 } 1363 1364 // ensure there's enough space; "<=" because of 0-indexing 1365 while (theResourceListToPopulate.size() <= index) { 1366 theResourceListToPopulate.add(null); 1367 } 1368 theResourceListToPopulate.set(index, resource); 1369 } 1370 } 1371 1372 @SuppressWarnings("OptionalIsPresent") 1373 @Nonnull 1374 public static Optional<ResourceHistoryTable> findLatestVersion( 1375 RequestDetails theRequest, 1376 JpaPid nextWantedPid, 1377 IResourceHistoryTableDao resourceHistoryTableDao, 1378 IInterceptorBroadcaster interceptorBroadcaster1) { 1379 assert nextWantedPid != null && !nextWantedPid.equals(NO_MORE); 1380 1381 Optional<ResourceHistoryTable> latestVersion = resourceHistoryTableDao 1382 .findVersionsForResource(JpaConstants.SINGLE_RESULT, nextWantedPid.toFk()) 1383 .findFirst(); 1384 String warning; 1385 if (latestVersion.isPresent()) { 1386 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1387 + " specifies an unknown current version, returning version " 1388 + latestVersion.get().getVersion() 1389 + " instead. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data."; 1390 } else { 1391 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1392 + " specifies an unknown current version, and no versions of this resource exist. 
This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data.";
		}

		IInterceptorBroadcaster interceptorBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(interceptorBroadcaster1, theRequest);
		logAndBoradcastWarning(theRequest, warning, interceptorBroadcaster);
		return latestVersion;
	}

	private static void logAndBoradcastWarning(
			RequestDetails theRequest, String warning, IInterceptorBroadcaster interceptorBroadcaster) {
		ourLog.warn(warning);

		if (interceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
			HookParams params = new HookParams();
			params.add(RequestDetails.class, theRequest);
			params.addIfMatchesType(ServletRequestDetails.class, theRequest);
			params.add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(warning));
			interceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
		return switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED -> getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables);
			case INLINE -> Map.of();
		};
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theRequestDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		int index = 0;
		for (JpaPid next : thePids) {
			position.put(next.getId(), index++);
		}

		// Can we fast-track this loading by checking Elasticsearch?
		boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids);
		if (isUsingElasticSearch) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in the index, so we warn and fall back to the JPA search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some JDBC drivers can't handle long param lists.
		QueryChunker.chunk(
				thePids,
				t -> doLoadPids(
						theRequestDetails,
						t,
						theIncludedPids,
						theResourceListToPopulate,
						theForHistoryOperation,
						position));
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isHibernateSearchIndexSearchParams()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we load resources via the fulltext svc (Hibernate Search), or fall back to the legacy
		// Elasticsearch-only implementation used for lastN?
		if (myStorageSettings.isHibernateSearchIndexSearchParams()
				&& myStorageSettings.isStoreResourceInHSearchIndex()) {
			List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());

			return myFulltextSearchSvc.getResources(pidList);
		} else if (!Objects.isNull(myParams) && myParams.isLastN()) {
			// legacy LastN implementation
			return myIElasticsearchSvc.getObservationResources(thePids);
		} else {
			return Collections.emptyList();
		}
	}

	/**
	 * This should return a HashSet and not just a Set, because we add to it later,
	 * so it can't be Collections.emptySet() or some such thing.
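	 * <p>
	 * Illustrative example: for a search such as {@code Observation?_include=Observation:subject},
	 * the matched Observation PIDs are passed in as {@code theMatches} and the returned set contains
	 * the PIDs of the referenced subject resources (e.g. Patient or Group), which the caller then
	 * marks with search mode INCLUDE.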
1592 * The JpaPid returned will have resource type populated. 1593 */ 1594 @Override 1595 public Set<JpaPid> loadIncludes( 1596 FhirContext theContext, 1597 EntityManager theEntityManager, 1598 Collection<JpaPid> theMatches, 1599 Collection<Include> theIncludes, 1600 boolean theReverseMode, 1601 DateRangeParam theLastUpdated, 1602 String theSearchIdOrDescription, 1603 RequestDetails theRequest, 1604 Integer theMaxCount) { 1605 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1606 parameters.setFhirContext(theContext); 1607 parameters.setEntityManager(theEntityManager); 1608 parameters.setMatches(theMatches); 1609 parameters.setIncludeFilters(theIncludes); 1610 parameters.setReverseMode(theReverseMode); 1611 parameters.setLastUpdated(theLastUpdated); 1612 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1613 parameters.setRequestDetails(theRequest); 1614 parameters.setMaxCount(theMaxCount); 1615 return loadIncludes(parameters); 1616 } 1617 1618 @Override 1619 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1620 Collection<JpaPid> matches = theParameters.getMatches(); 1621 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1622 boolean reverseMode = theParameters.isReverseMode(); 1623 EntityManager entityManager = theParameters.getEntityManager(); 1624 Integer maxCount = theParameters.getMaxCount(); 1625 FhirContext fhirContext = theParameters.getFhirContext(); 1626 RequestDetails request = theParameters.getRequestDetails(); 1627 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1628 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1629 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1630 IInterceptorBroadcaster compositeBroadcaster = 1631 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1632 1633 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1634 CurrentThreadCaptureQueriesListener.startCapturing(); 1635 } 1636 if (matches.isEmpty()) { 1637 return new HashSet<>(); 1638 } 1639 if (currentIncludes == null || currentIncludes.isEmpty()) { 1640 return new HashSet<>(); 1641 } 1642 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1643 String searchPartitionIdFieldName = 1644 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1645 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1646 String findPartitionIdFieldName = 1647 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1648 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1649 String findVersionFieldName = null; 1650 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1651 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1652 } 1653 1654 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1655 HashSet<JpaPid> allAdded = new HashSet<>(); 1656 HashSet<JpaPid> original = new HashSet<>(matches); 1657 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1658 1659 int roundCounts = 0; 1660 StopWatch w = new StopWatch(); 1661 1662 boolean addedSomeThisRound; 1663 do { 1664 roundCounts++; 1665 1666 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1667 1668 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1669 Include nextInclude = iter.next(); 1670 if (!nextInclude.isRecurse()) { 1671 iter.remove(); 1672 } 1673 1674 // Account for _include=* 1675 boolean matchAll = "*".equals(nextInclude.getValue()); 1676 1677 // Account for _include=[resourceType]:* 1678 String wantResourceType = null; 1679 if (!matchAll) { 1680 if ("*".equals(nextInclude.getParamName())) { 1681 wantResourceType = nextInclude.getParamType(); 1682 matchAll = true; 1683 } 1684 } 1685 1686 if (matchAll) { 1687 loadIncludesMatchAll( 1688 findPidFieldName, 1689 findPartitionIdFieldName, 1690 findResourceTypeFieldName, 1691 findVersionFieldName, 1692 searchPidFieldName, 1693 searchPartitionIdFieldName, 1694 wantResourceType, 1695 reverseMode, 1696 hasDesiredResourceTypes, 1697 nextRoundMatches, 1698 entityManager, 1699 maxCount, 1700 desiredResourceTypes, 1701 pidsToInclude, 1702 request); 1703 } else { 1704 loadIncludesMatchSpecific( 1705 nextInclude, 1706 fhirContext, 1707 findPidFieldName, 1708 findPartitionIdFieldName, 1709 findVersionFieldName, 1710 searchPidFieldName, 1711 reverseMode, 1712 nextRoundMatches, 1713 entityManager, 1714 maxCount, 1715 pidsToInclude, 1716 request); 1717 } 1718 } 1719 1720 nextRoundMatches.clear(); 1721 for (JpaPid next : pidsToInclude) { 1722 if (!original.contains(next) && !allAdded.contains(next)) { 1723 nextRoundMatches.add(next); 1724 } else { 1725 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1726 } 1727 } 1728 1729 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1730 1731 if (maxCount != null && allAdded.size() >= maxCount) { 1732 break; 1733 } 1734 1735 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1736 1737 allAdded.removeAll(original); 1738 1739 ourLog.info( 1740 "Loaded {} {} in {} rounds and {} ms for search {}", 1741 allAdded.size(), 1742 reverseMode ? 
"_revincludes" : "_includes", 1743 roundCounts, 1744 w.getMillisAndRestart(), 1745 searchIdOrDescription); 1746 1747 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1748 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1749 } 1750 1751 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1752 // This can be used to remove results from the search result details before 1753 // the user has a chance to know that they were in the results 1754 if (!allAdded.isEmpty()) { 1755 1756 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1757 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1758 JpaPreResourceAccessDetails accessDetails = 1759 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1760 HookParams params = new HookParams() 1761 .add(IPreResourceAccessDetails.class, accessDetails) 1762 .add(RequestDetails.class, request) 1763 .addIfMatchesType(ServletRequestDetails.class, request); 1764 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1765 1766 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1767 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1768 JpaPid value = includedPidList.remove(i); 1769 if (value != null) { 1770 allAdded.remove(value); 1771 } 1772 } 1773 } 1774 } 1775 } 1776 1777 return allAdded; 1778 } 1779 1780 private void loadIncludesMatchSpecific( 1781 Include nextInclude, 1782 FhirContext fhirContext, 1783 String findPidFieldName, 1784 String findPartitionFieldName, 1785 String findVersionFieldName, 1786 String searchPidFieldName, 1787 boolean reverseMode, 1788 List<JpaPid> nextRoundMatches, 1789 EntityManager entityManager, 1790 Integer maxCount, 1791 HashSet<JpaPid> pidsToInclude, 1792 RequestDetails theRequest) { 1793 List<String> paths; 1794 1795 // Start replace 1796 RuntimeSearchParam param; 1797 String resType = nextInclude.getParamType(); 1798 if (isBlank(resType)) { 1799 return; 1800 } 1801 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1802 if (def == null) { 1803 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1804 return; 1805 } 1806 1807 String paramName = nextInclude.getParamName(); 1808 if (isNotBlank(paramName)) { 1809 param = mySearchParamRegistry.getActiveSearchParam( 1810 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1811 } else { 1812 param = null; 1813 } 1814 if (param == null) { 1815 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1816 return; 1817 } 1818 1819 paths = param.getPathsSplitForResourceType(resType); 1820 // end replace 1821 1822 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1823 1824 for (String nextPath : paths) { 1825 String findPidFieldSqlColumn = 1826 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1827 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1828 if (findVersionFieldName != null) { 1829 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1830 } 1831 if (myPartitionSettings.isDatabasePartitionMode()) { 1832 fieldsToLoad += ", r."; 1833 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1834 ? 
"partition_id" 1835 : "target_res_partition_id"; 1836 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1837 } 1838 1839 // Query for includes lookup has 2 cases 1840 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1841 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1842 // url in target_resource_url 1843 1844 // Case 1: 1845 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1846 1847 String searchPidFieldSqlColumn = 1848 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1849 StringBuilder localReferenceQuery = new StringBuilder(); 1850 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1851 localReferenceQuery.append(" FROM hfj_res_link r "); 1852 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1853 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1854 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1855 } 1856 localReferenceQuery 1857 .append(" AND r.") 1858 .append(searchPidFieldSqlColumn) 1859 .append(" IN (:target_pids) "); 1860 if (myPartitionSettings.isDatabasePartitionMode()) { 1861 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1862 ? "target_res_partition_id" 1863 : "partition_id"; 1864 localReferenceQuery 1865 .append("AND r.") 1866 .append(partitionFieldToSearch) 1867 .append(" = :search_partition_id "); 1868 } 1869 localReferenceQueryParams.put("src_path", nextPath); 1870 // we loop over target_pids later. 1871 if (targetResourceTypes != null) { 1872 if (targetResourceTypes.size() == 1) { 1873 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1874 localReferenceQueryParams.put( 1875 "target_resource_type", 1876 targetResourceTypes.iterator().next()); 1877 } else { 1878 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1879 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1880 } 1881 } 1882 1883 // Case 2: 1884 Pair<String, Map<String, Object>> canonicalQuery = 1885 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1886 1887 String sql = localReferenceQuery.toString(); 1888 if (canonicalQuery != null) { 1889 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1890 } 1891 1892 Map<String, Object> limitParams = new HashMap<>(); 1893 if (maxCount != null) { 1894 LinkedList<Object> bindVariables = new LinkedList<>(); 1895 sql = SearchQueryBuilder.applyLimitToSql( 1896 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1897 1898 // The dialect SQL limiter uses positional params, but we're using 1899 // named params here, so we need to replace the positional params 1900 // with equivalent named ones 1901 StringBuilder sb = new StringBuilder(); 1902 for (int i = 0; i < sql.length(); i++) { 1903 char nextChar = sql.charAt(i); 1904 if (nextChar == '?') { 1905 String nextName = "limit" + i; 1906 sb.append(':').append(nextName); 1907 limitParams.put(nextName, bindVariables.removeFirst()); 1908 } else { 1909 sb.append(nextChar); 1910 } 1911 } 1912 sql = sb.toString(); 1913 } 1914 1915 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1916 for (Collection<JpaPid> nextPartition : partitions) { 1917 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1918 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1919 if (myPartitionSettings.isDatabasePartitionMode()) { 1920 q.setParameter( 1921 "search_partition_id", 1922 nextPartition.iterator().next().getPartitionId()); 1923 } 1924 localReferenceQueryParams.forEach(q::setParameter); 1925 if (canonicalQuery != null) { 1926 canonicalQuery.getRight().forEach(q::setParameter); 1927 } 1928 limitParams.forEach(q::setParameter); 1929 1930 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1931 Tuple result; 1932 while (iter.hasNext()) { 1933 result = iter.next(); 1934 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1935 Long resourceVersion = null; 1936 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1937 resourceVersion = 1938 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1939 } 1940 Integer partitionId = null; 1941 if (myPartitionSettings.isDatabasePartitionMode()) { 1942 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1943 } 1944 1945 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1946 pid.setPartitionId(partitionId); 1947 pidsToInclude.add(pid); 1948 } 1949 } 1950 // myEntityManager.clear(); 1951 } 1952 } 1953 } 1954 1955 private void loadIncludesMatchAll( 1956 String findPidFieldName, 1957 String findPartitionFieldName, 1958 String findResourceTypeFieldName, 1959 String findVersionFieldName, 1960 String searchPidFieldName, 1961 String searchPartitionFieldName, 1962 String wantResourceType, 1963 boolean reverseMode, 1964 boolean hasDesiredResourceTypes, 1965 List<JpaPid> nextRoundMatches, 1966 EntityManager entityManager, 1967 Integer maxCount, 1968 List<String> desiredResourceTypes, 1969 HashSet<JpaPid> pidsToInclude, 1970 RequestDetails request) { 1971 1972 record IncludesRecord( 1973 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1974 1975 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1976 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1977 Root<ResourceLink> root = query.from(ResourceLink.class); 1978 1979 List<Selection<?>> selectionList = new ArrayList<>(); 1980 selectionList.add(root.get(findPidFieldName)); 1981 selectionList.add(root.get(findResourceTypeFieldName)); 1982 selectionList.add(root.get("myTargetResourceUrl")); 1983 if (findVersionFieldName != null) { 1984 selectionList.add(root.get(findVersionFieldName)); 1985 } else { 1986 selectionList.add(cb.nullLiteral(Long.class)); 1987 } 1988 if (myPartitionSettings.isDatabasePartitionMode()) { 1989 selectionList.add(root.get(findPartitionFieldName)); 1990 } else { 1991 selectionList.add(cb.nullLiteral(Integer.class)); 1992 } 1993 query.multiselect(selectionList); 1994 1995 List<Predicate> predicates = new ArrayList<>(); 1996 1997 if (myPartitionSettings.isDatabasePartitionMode()) { 1998 predicates.add( 1999 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 2000 } 2001 2002 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 2003 2004 /* 2005 * We need to set the resource type in 2 cases only: 2006 * 1) we are in $everything mode 2007 * (where we only want to fetch specific resource types, regardless of what is 2008 * available to fetch) 2009 * 2) we are doing revincludes 2010 * 2011 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 2012 * should always be checking the source resource type on the resource link. We don't 2013 * actually index that column though by default, so in order to try and be efficient 2014 * we don't actually include it for includes (but we do for revincludes). This is 2015 * because for an include, it doesn't really make sense to include a different 2016 * resource type than the one you are searching on. 2017 */ 2018 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 2019 // because mySourceResourceType is not part of the HFJ_RES_LINK 2020 // index, this might not be the most optimal performance. 2021 // but it is for an $everything operation (and maybe we should update the index) 2022 predicates.add( 2023 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 2024 } else { 2025 wantResourceType = null; 2026 } 2027 2028 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 2029 // resources 2030 // (e.g. via Provenance, List, or Group) when in an $everything operation 2031 if (myParams != null 2032 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 2033 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 2034 predicates.add(cb.not(root.get("mySourceResourceType") 2035 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 2036 } 2037 2038 if (hasDesiredResourceTypes) { 2039 predicates.add( 2040 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 2041 } 2042 2043 query.where(cb.and(predicates.toArray(new Predicate[0]))); 2044 2045 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 2046 for (Collection<JpaPid> nextPartition : partitions) { 2047 2048 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 2049 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 2050 if (myPartitionSettings.isDatabasePartitionMode()) { 2051 q.setParameter( 2052 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 2053 } 2054 if (wantResourceType != null) { 2055 q.setParameter("want_resource_type", wantResourceType); 2056 } 2057 if (maxCount != null) { 2058 q.setMaxResults(maxCount); 2059 } 2060 if (hasDesiredResourceTypes) { 2061 q.setParameter("desired_target_resource_types", desiredResourceTypes); 2062 } 2063 2064 Set<String> canonicalUrls = null; 2065 2066 try (ScrollableResultsIterator<IncludesRecord> iter = 2067 new ScrollableResultsIterator<>(toScrollableResults(q))) { 2068 IncludesRecord nextRow; 2069 while (iter.hasNext()) { 2070 nextRow = iter.next(); 2071 if (nextRow == null) { 2072 // This can happen if there are outgoing references which are canonical or point to 2073 // other servers 2074 continue; 2075 } 2076 2077 Long version = nextRow.version; 2078 Long resourceId = nextRow.resourceId; 2079 String resourceType = nextRow.resourceType; 2080 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 2081 Integer partitionId = nextRow.partitionId; 2082 2083 if (resourceId != null) { 2084 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 2085 pid.setPartitionId(partitionId); 2086 pidsToInclude.add(pid); 2087 } else if (resourceCanonicalUrl != null) { 2088 if (canonicalUrls == null) { 2089 canonicalUrls = new HashSet<>(); 2090 } 2091 canonicalUrls.add(resourceCanonicalUrl); 2092 } 
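					// Canonical (URL-based) references collected above are resolved in bulk once this
					// scroll completes, via loadCanonicalUrls() below, rather than with one lookup per row.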
				}
			}

			if (canonicalUrls != null) {
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		if (canonicalUrlTargets.isEmpty()) {
			return;
		}

		String message =
				"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
		firePerformanceWarning(theRequestDetails, message);

		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance trace hook: sends the SQL queries captured on the current thread
	 * to the {@code JPA_PERFTRACE_RAW_SQL} pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = nextInclude.getParamTargetType();
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
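			// (callers treat a null return as meaning "no target type restriction")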
2185 targetResourceTypes = null; 2186 } 2187 return targetResourceTypes; 2188 } 2189 2190 @Nullable 2191 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2192 String theVersionFieldName, 2193 Set<String> theTargetResourceTypes, 2194 boolean theReverse, 2195 RequestDetails theRequest, 2196 RuntimeSearchParam theParam) { 2197 2198 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2199 2200 // If we know for sure that none of the paths involved in this SearchParameter could 2201 // be indexing a canonical 2202 if (Arrays.stream(searchParameterPaths) 2203 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2204 myContext, myResourceName, t, theReverse))) { 2205 return null; 2206 } 2207 2208 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2209 if (theVersionFieldName != null) { 2210 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2211 fieldsToLoadFromSpidxUriTable += ", NULL"; 2212 } 2213 2214 if (myPartitionSettings.isDatabasePartitionMode()) { 2215 if (theReverse) { 2216 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2217 } else { 2218 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2219 } 2220 } 2221 2222 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2223 // But sp_name isn't indexed, so we use hash_identity instead. 2224 CanonicalUrlTargets canonicalUrlTargets = 2225 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2226 if (canonicalUrlTargets.isEmpty()) { 2227 return null; 2228 } 2229 2230 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2231 StringBuilder canonicalUrlQuery = new StringBuilder(); 2232 canonicalUrlQuery 2233 .append("SELECT ") 2234 .append(fieldsToLoadFromSpidxUriTable) 2235 .append(' '); 2236 canonicalUrlQuery.append("FROM hfj_res_link r "); 2237 2238 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2239 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2240 if (myPartitionSettings.isDatabasePartitionMode()) { 2241 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2242 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2243 } 2244 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2245 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2246 canonicalUriQueryParams.put( 2247 "uri_identity_hash", 2248 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2249 } else { 2250 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2251 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2252 } 2253 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2254 canonicalUrlQuery.append(")"); 2255 2256 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2257 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2258 canonicalUrlQuery.append(" AND"); 2259 if (myPartitionSettings.isDatabasePartitionMode()) { 2260 if (theReverse) { 2261 canonicalUrlQuery.append(" rUri.partition_id"); 2262 } else { 2263 canonicalUrlQuery.append(" r.partition_id"); 2264 } 2265 canonicalUrlQuery.append(" = :search_partition_id"); 2266 canonicalUrlQuery.append(" AND"); 2267 } 2268 if (theReverse) { 2269 
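			// reverse (_revinclude) mode: the matched resources are the canonical targets, so we filter
			// the URI index rows by the matched PIDs; the SELECT list above already returns the
			// referencing resource (r.src_resource_id)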
canonicalUrlQuery.append(" rUri.res_id"); 2270 } else { 2271 canonicalUrlQuery.append(" r.src_resource_id"); 2272 } 2273 canonicalUrlQuery.append(" IN (:target_pids)"); 2274 2275 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2276 } 2277 2278 @Nonnull 2279 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2280 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2281 Set<String> targetResourceTypes = theTargetResourceTypes; 2282 if (targetResourceTypes == null) { 2283 /* 2284 * If we don't have a list of valid target types, we need to figure out a list of all 2285 * possible target types in order to perform the search of the URI index table. This is 2286 * because the hash_identity column encodes the resource type, so we'll need a hash 2287 * value for each possible target type. 2288 */ 2289 targetResourceTypes = new HashSet<>(); 2290 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2291 if (theReverse) { 2292 // For reverse includes, it is really hard to figure out what types 2293 // are actually potentially pointing to the type we're searching for 2294 // in this context, so let's just assume it could be anything. 2295 targetResourceTypes = possibleTypes; 2296 } else { 2297 List<RuntimeSearchParam> params = mySearchParamRegistry 2298 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2299 .values() 2300 .stream() 2301 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2302 .toList(); 2303 for (var next : params) { 2304 2305 String paths = next.getPath(); 2306 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2307 2308 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2309 myContext, myResourceName, path, theReverse)) { 2310 continue; 2311 } 2312 2313 if (!next.getTargets().isEmpty()) { 2314 // For each reference parameter on the resource type we're searching for, 2315 // add all the potential target types to the list of possible target 2316 // resource types we can look up. 
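						// (for example, a reference search parameter whose declared targets are Patient and
						// Group would contribute those two types here, provided DAOs for both are registered)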
2317 for (var nextTarget : next.getTargets()) { 2318 if (possibleTypes.contains(nextTarget)) { 2319 targetResourceTypes.add(nextTarget); 2320 } 2321 } 2322 } else { 2323 // If we have any references that don't define any target types, then 2324 // we need to assume that all enabled resource types are possible target 2325 // types 2326 targetResourceTypes.addAll(possibleTypes); 2327 break; 2328 } 2329 } 2330 } 2331 } 2332 } 2333 2334 if (targetResourceTypes.isEmpty()) { 2335 return new CanonicalUrlTargets(Set.of(), Set.of()); 2336 } 2337 2338 Set<Long> hashIdentityValues = new HashSet<>(); 2339 Set<Integer> partitionIds = new HashSet<>(); 2340 for (String type : targetResourceTypes) { 2341 2342 RequestPartitionId readPartition; 2343 if (myPartitionSettings.isPartitioningEnabled()) { 2344 readPartition = 2345 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2346 } else { 2347 readPartition = RequestPartitionId.defaultPartition(); 2348 } 2349 if (readPartition.hasPartitionIds()) { 2350 partitionIds.addAll(readPartition.getPartitionIds()); 2351 } 2352 2353 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2354 myPartitionSettings, readPartition, type, "url"); 2355 hashIdentityValues.add(hashIdentity); 2356 } 2357 2358 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2359 } 2360 2361 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2362 public boolean isEmpty() { 2363 return hashIdentityValues.isEmpty(); 2364 } 2365 } 2366 2367 /** 2368 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2369 * those pids where: 2370 * <ul> 2371 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2372 * <li>Each list only contains JpaPids with the same partition ID</li> 2373 * </ul> 2374 */ 2375 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2376 2377 if (theNextRoundMatches.size() <= theMaxLoad) { 2378 boolean allSamePartition = true; 2379 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2380 if (!Objects.equals( 2381 theNextRoundMatches.get(i - 1).getPartitionId(), 2382 theNextRoundMatches.get(i).getPartitionId())) { 2383 allSamePartition = false; 2384 break; 2385 } 2386 } 2387 if (allSamePartition) { 2388 return Collections.singletonList(theNextRoundMatches); 2389 } 2390 } 2391 2392 // Break into partitioned sublists 2393 ListMultimap<String, JpaPid> lists = 2394 MultimapBuilder.hashKeys().arrayListValues().build(); 2395 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2396 String partitionId = nextRoundMatch.getPartitionId() != null 2397 ? 
nextRoundMatch.getPartitionId().toString()
					: "";
			lists.put(partitionId, nextRoundMatch);
		}

		List<Collection<JpaPid>> retVal = new ArrayList<>();
		for (String key : lists.keySet()) {
			List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
			retVal.addAll(nextPartition);
		}

		// In unit test mode, we sort the results just for unit test predictability
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			retVal = retVal.stream()
					.map(t -> t.stream().sorted().collect(Collectors.toList()))
					.collect(Collectors.toList());
		}

		return retVal;
	}

	private void attemptComboUniqueSpProcessing(
			QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
		RuntimeSearchParam comboParam = null;
		List<String> comboParamNames = null;
		List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
				myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		if (!exactMatchParams.isEmpty()) {
			comboParam = exactMatchParams.get(0);
			comboParamNames = new ArrayList<>(theParams.keySet());
		}

		if (comboParam == null) {
			List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			for (RuntimeSearchParam nextCandidate : candidateComboParams) {
				List<String> nextCandidateParamNames =
						JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
								.map(RuntimeSearchParam::getName)
								.collect(Collectors.toList());
				if (theParams.keySet().containsAll(nextCandidateParamNames)) {
					comboParam = nextCandidate;
					comboParamNames = nextCandidateParamNames;
					break;
				}
			}
		}

		if (comboParam != null) {
			Collections.sort(comboParamNames);

			// Since we're going to remove elements below
			theParams.values().forEach(this::ensureSubListsAreWritable);

			/*
			 * Apply search against the combo param index in a loop:
			 *
			 * 1. First we check whether the actual parameter values in the
			 * parameter map are actually usable for searching against the combo
			 * param index. E.g. no search modifiers, date comparators, etc.,
			 * since these mean you can't use the combo index.
			 *
			 * 2. Apply and create the join SQL. We remove parameter values from
			 * the map as we apply them, so any parameter values remaining in the
			 * map after each loop haven't yet been factored into the SQL.
			 *
			 * The loop allows us to create multiple combo index joins if there
			 * are multiple AND expressions for the related parameters.
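			 *
			 * For example (illustrative): if the query supplies two AND'ed value pairs for the combo
			 * parameters, e.g. family=Simpson&birthdate=1990-01-01 plus a second family/birthdate pair,
			 * the loop runs twice and each pass produces its own combo index join.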
2465 */ 2466 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2467 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2468 } 2469 } 2470 } 2471 2472 private void applyComboSearchParam( 2473 QueryStack theQueryStack, 2474 @Nonnull SearchParameterMap theParams, 2475 RequestDetails theRequest, 2476 List<String> theComboParamNames, 2477 RuntimeSearchParam theComboParam) { 2478 2479 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2480 for (String nextParamName : theComboParamNames) { 2481 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2482 inputs.add(nextValues); 2483 } 2484 2485 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2486 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2487 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2488 2489 StringBuilder searchStringBuilder = new StringBuilder(); 2490 searchStringBuilder.append(myResourceName); 2491 searchStringBuilder.append("?"); 2492 2493 boolean first = true; 2494 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2495 2496 String nextParamName = theComboParamNames.get(paramIndex); 2497 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2498 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2499 // As a result, we strip the prefix if present. 2500 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(), EQUAL.getValue()); 2501 2502 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2503 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2504 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2505 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2506 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2507 } 2508 } 2509 2510 if (first) { 2511 first = false; 2512 } else { 2513 searchStringBuilder.append('&'); 2514 } 2515 2516 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2517 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2518 2519 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2520 } 2521 2522 String indexString = searchStringBuilder.toString(); 2523 ourLog.debug( 2524 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2525 2526 indexStrings.add(indexString); 2527 } 2528 2529 // Just to make sure we're stable for tests 2530 indexStrings.sort(Comparator.naturalOrder()); 2531 2532 // Interceptor broadcast: JPA_PERFTRACE_INFO 2533 IInterceptorBroadcaster compositeBroadcaster = 2534 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2535 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2536 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2537 StorageProcessingMessage msg = new StorageProcessingMessage() 2538 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2539 + indexStringForLog); 2540 HookParams params = new HookParams() 2541 .add(RequestDetails.class, theRequest) 2542 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2543 .add(StorageProcessingMessage.class, msg); 2544 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2545 } 2546 2547 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2548 case UNIQUE: 2549 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2550 break; 2551 case NON_UNIQUE: 2552 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2553 break; 2554 } 2555 2556 // Remove any empty parameters remaining after this 2557 theParams.clean(); 2558 } 2559 2560 /** 2561 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2562 * searching against a combo param with the given parameter names. This might be {@literal false} if 2563 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2564 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2565 */ 2566 private boolean validateParamValuesAreValidForComboParam( 2567 RequestDetails theRequest, 2568 @Nonnull SearchParameterMap theParams, 2569 List<String> theComboParamNames, 2570 RuntimeSearchParam theComboParam) { 2571 boolean paramValuesAreValidForCombo = true; 2572 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2573 2574 for (String nextParamName : theComboParamNames) { 2575 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2576 2577 if (nextValues == null || nextValues.isEmpty()) { 2578 paramValuesAreValidForCombo = false; 2579 break; 2580 } 2581 2582 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2583 paramOrValues.add(nextAndValue); 2584 2585 for (IQueryParameterType nextOrValue : nextAndValue) { 2586 if (nextOrValue instanceof DateParam dateParam) { 2587 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2588 String message = "Search with params " + theComboParamNames 2589 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2590 + nextParamName + "'"; 2591 firePerformanceInfo(theRequest, message); 2592 paramValuesAreValidForCombo = false; 2593 break; 2594 } 2595 } 2596 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2597 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2598 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2599 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
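					// e.g. birthdate=le2025-01-01 would disqualify the combo index here, while
					// birthdate=eq2025-01-01 (or plain birthdate=2025-01-01) remains eligible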
2600 if (prefix != null && prefix != EQUAL) { 2601 String message = "Search with params " + theComboParamNames 2602 + " is not a candidate for combo searching - Parameter '" + nextParamName 2603 + "' has prefix: '" 2604 + paramWithPrefix.getPrefix().getValue() + "'"; 2605 firePerformanceInfo(theRequest, message); 2606 paramValuesAreValidForCombo = false; 2607 break; 2608 } 2609 } 2610 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2611 String message = "Search with params " + theComboParamNames 2612 + " is not a candidate for combo searching - Parameter '" + nextParamName 2613 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2614 firePerformanceInfo(theRequest, message); 2615 paramValuesAreValidForCombo = false; 2616 break; 2617 } 2618 } 2619 2620 // Reference params are only eligible for using a composite index if they 2621 // are qualified 2622 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2623 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2624 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2625 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2626 if (isBlank(param.getResourceType())) { 2627 ourLog.debug( 2628 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2629 paramValuesAreValidForCombo = false; 2630 break; 2631 } 2632 } 2633 2634 // Date params are not eligible for using composite unique index 2635 // as index could contain date with different precision (e.g. DAY, SECOND) 2636 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2637 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2638 ourLog.debug( 2639 "Search with params {} is not a candidate for combo searching - " 2640 + "Unique combo search parameter '{}' has DATE type", 2641 theComboParamNames, 2642 nextParamName); 2643 paramValuesAreValidForCombo = false; 2644 break; 2645 } 2646 } 2647 2648 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2649 ourLog.debug( 2650 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2651 paramValuesAreValidForCombo = false; 2652 } 2653 2654 return paramValuesAreValidForCombo; 2655 } 2656 2657 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2658 for (int i = 0; i < theListOfLists.size(); i++) { 2659 List<T> oldSubList = theListOfLists.get(i); 2660 if (!(oldSubList instanceof ArrayList)) { 2661 List<T> newSubList = new ArrayList<>(oldSubList); 2662 theListOfLists.set(i, newSubList); 2663 } 2664 } 2665 } 2666 2667 @Override 2668 public void setFetchSize(int theFetchSize) { 2669 myFetchSize = theFetchSize; 2670 } 2671 2672 public SearchParameterMap getParams() { 2673 return myParams; 2674 } 2675 2676 public CriteriaBuilder getBuilder() { 2677 return myCriteriaBuilder; 2678 } 2679 2680 public Class<? 
extends IBaseResource> getResourceType() { 2681 return myResourceType; 2682 } 2683 2684 public String getResourceName() { 2685 return myResourceName; 2686 } 2687 2688 /** 2689 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2690 */ 2691 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2692 2693 private final RequestDetails myRequest; 2694 private final Set<JpaPid> myCurrentPids; 2695 private Iterator<JpaPid> myCurrentIterator; 2696 private JpaPid myNext; 2697 2698 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2699 myCurrentPids = new HashSet<>(thePidSet); 2700 myCurrentIterator = null; 2701 myRequest = theRequest; 2702 } 2703 2704 private void fetchNext() { 2705 while (myNext == null) { 2706 2707 if (myCurrentIterator == null) { 2708 Set<Include> includes = new HashSet<>(); 2709 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2710 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2711 for (IQueryParameterType type : typeList) { 2712 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken()); 2713 for (String resourceType : queryString.split(",")) { 2714 String rt = resourceType.trim(); 2715 if (isNotBlank(rt)) { 2716 includes.add(new Include(rt + ":*", true)); 2717 } 2718 } 2719 } 2720 } 2721 } 2722 if (includes.isEmpty()) { 2723 includes.add(new Include("*", true)); 2724 } 2725 Set<JpaPid> newPids = loadIncludes( 2726 myContext, 2727 myEntityManager, 2728 myCurrentPids, 2729 includes, 2730 false, 2731 getParams().getLastUpdated(), 2732 mySearchUuid, 2733 myRequest, 2734 null); 2735 myCurrentIterator = newPids.iterator(); 2736 } 2737 2738 if (myCurrentIterator.hasNext()) { 2739 myNext = myCurrentIterator.next(); 2740 } else { 2741 myNext = NO_MORE; 2742 } 2743 } 2744 } 2745 2746 @Override 2747 public boolean hasNext() { 2748 fetchNext(); 2749 return !NO_MORE.equals(myNext); 2750 } 2751 2752 @Override 2753 public JpaPid next() { 2754 fetchNext(); 2755 JpaPid retVal = myNext; 2756 myNext = null; 2757 return retVal; 2758 } 2759 } 2760 /** 2761 * Basic Query iterator, used to fetch the results of a query. 2762 */ 2763 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2764 2765 private final SearchRuntimeDetails mySearchRuntimeDetails; 2766 2767 private final RequestDetails myRequest; 2768 private final boolean myHaveRawSqlHooks; 2769 private final boolean myHavePerfTraceFoundIdHook; 2770 private final Integer myOffset; 2771 private final IInterceptorBroadcaster myCompositeBroadcaster; 2772 private boolean myFirst = true; 2773 private IncludesIterator myIncludesIterator; 2774 /** 2775 * The next JpaPid value of the next result in this query. 2776 * Will not be null if fetched using getNext() 2777 */ 2778 private JpaPid myNext; 2779 /** 2780 * The current query result iterator running sql and supplying PIDs 2781 * @see #myQueryList 2782 */ 2783 private ISearchQueryExecutor myResultsIterator; 2784 2785 private boolean myFetchIncludesForEverythingOperation; 2786 2787 /** 2788 * The count of resources skipped because they were seen in earlier results 2789 */ 2790 private int mySkipCount = 0; 2791 /** 2792 * The count of resources that are new in this search 2793 * (ie, not cached in previous searches) 2794 */ 2795 private int myNonSkipCount = 0; 2796 /** 2797 * The list of queries to use to find all results. 2798 * Normal JPA queries will normally have a single entry. 
2799 * Queries that involve Hibernate Search/Elasticsearch may have 2800 * multiple queries because of chunking. 2801 * The $everything operation also jams some extra results in. 2802 */ 2803 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2804 2805 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2806 mySearchRuntimeDetails = theSearchRuntimeDetails; 2807 myOffset = myParams.getOffset(); 2808 myRequest = theRequest; 2809 myCompositeBroadcaster = 2810 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2811 2812 // everything requires fetching recursively all related resources 2813 if (myParams.getEverythingMode() != null) { 2814 myFetchIncludesForEverythingOperation = true; 2815 } 2816 2817 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2818 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2819 } 2820 2821 private void fetchNext() { 2822 try { 2823 if (myHaveRawSqlHooks) { 2824 CurrentThreadCaptureQueriesListener.startCapturing(); 2825 } 2826 2827 // If we don't have a query yet, create one 2828 if (myResultsIterator == null) { 2829 if (!mySearchProperties.hasMaxResultsRequested()) { 2830 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2831 } 2832 2833 /* 2834 * assigns the results iterator 2835 * and populates the myQueryList. 2836 */ 2837 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2838 } 2839 2840 if (myNext == null) { 2841 // no next means we need a new query (if one is available) 2842 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2843 /* 2844 * Because we combine our DB searches with Lucene 2845 * sometimes we can have multiple results iterators 2846 * (with only some having data in them to extract). 2847 * 2848 * We'll iterate our results iterators until we 2849 * either run out of results iterators, or we 2850 * have one that actually has data in it. 2851 */ 2852 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2853 retrieveNextIteratorQuery(); 2854 } 2855 2856 if (!myResultsIterator.hasNext()) { 2857 // we couldn't find a results iterator; 2858 // we're done here 2859 break; 2860 } 2861 2862 JpaPid nextPid = myResultsIterator.next(); 2863 if (myHavePerfTraceFoundIdHook) { 2864 callPerformanceTracingHook(nextPid); 2865 } 2866 2867 if (nextPid != null) { 2868 if (!myPidSet.contains(nextPid)) { 2869 if (!mySearchProperties.isDeduplicateInDatabase()) { 2870 /* 2871 * We only add to the map if we aren't fetching "everything"; 2872 * otherwise, we let the de-duplication happen in the database 2873 * (see createChunkedQueryNormalSearch above), because it 2874 * saves memory that way. 
2875 */ 2876 myPidSet.add(nextPid); 2877 } 2878 if (doNotSkipNextPidForEverything()) { 2879 myNext = nextPid; 2880 myNonSkipCount++; 2881 break; 2882 } 2883 } else { 2884 mySkipCount++; 2885 } 2886 } 2887 2888 if (!myResultsIterator.hasNext()) { 2889 if (mySearchProperties.hasMaxResultsRequested() 2890 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2891 if (mySkipCount > 0 && myNonSkipCount == 0) { 2892 sendProcessingMsgAndFirePerformanceHook(); 2893 // need the next iterator; increase the maxsize 2894 // (we should always do this) 2895 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2896 mySearchProperties.setMaxResultsRequested(maxResults); 2897 2898 if (!mySearchProperties.isDeduplicateInDatabase()) { 2899 // if we're not using the database to deduplicate 2900 // we should recheck our memory usage 2901 // the prefetch size check is future proofing 2902 int prefetchSize = myStorageSettings 2903 .getSearchPreFetchThresholds() 2904 .size(); 2905 if (prefetchSize > 0) { 2906 if (myStorageSettings 2907 .getSearchPreFetchThresholds() 2908 .get(prefetchSize - 1) 2909 < mySearchProperties.getMaxResultsRequested()) { 2910 mySearchProperties.setDeduplicateInDatabase(true); 2911 } 2912 } 2913 } 2914 2915 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2916 } 2917 } 2918 } 2919 } 2920 } 2921 2922 if (myNext == null) { 2923 // if we got here, it means the current JpaPid has already been processed, 2924 // and we will decide (here) if we need to fetch related resources recursively 2925 if (myFetchIncludesForEverythingOperation) { 2926 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2927 myFetchIncludesForEverythingOperation = false; 2928 } 2929 if (myIncludesIterator != null) { 2930 while (myIncludesIterator.hasNext()) { 2931 JpaPid next = myIncludesIterator.next(); 2932 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2933 myNext = next; 2934 break; 2935 } 2936 } 2937 if (myNext == null) { 2938 myNext = NO_MORE; 2939 } 2940 } else { 2941 myNext = NO_MORE; 2942 } 2943 } 2944 2945 if (!mySearchProperties.hasMaxResultsRequested()) { 2946 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2947 } else { 2948 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2949 } 2950 2951 } finally { 2952 // search finished - fire hooks 2953 if (myHaveRawSqlHooks) { 2954 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2955 } 2956 } 2957 2958 if (myFirst) { 2959 HookParams params = new HookParams() 2960 .add(RequestDetails.class, myRequest) 2961 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2962 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2963 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2964 myFirst = false; 2965 } 2966 2967 if (NO_MORE.equals(myNext)) { 2968 HookParams params = new HookParams() 2969 .add(RequestDetails.class, myRequest) 2970 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2971 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2972 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2973 } 2974 } 2975 2976 private Integer calculateMaxResultsToFetch() { 2977 if (myParams.getLoadSynchronousUpTo() != null) { 2978 return myParams.getLoadSynchronousUpTo(); 2979 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2980 return myParams.getEverythingMode() != null 2981 ? 
myParams.getOffset() + myParams.getCount() 2982 : myParams.getCount(); 2983 } else { 2984 return myStorageSettings.getFetchSizeDefaultMaximum(); 2985 } 2986 } 2987 2988 private boolean doNotSkipNextPidForEverything() { 2989 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2990 } 2991 2992 private void callPerformanceTracingHook(JpaPid theNextPid) { 2993 HookParams params = new HookParams() 2994 .add(Integer.class, System.identityHashCode(this)) 2995 .add(Object.class, theNextPid); 2996 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2997 } 2998 2999 private void sendProcessingMsgAndFirePerformanceHook() { 3000 String msg = "Pass completed with no matching results seeking rows " 3001 + myPidSet.size() + "-" + mySkipCount 3002 + ". This indicates an inefficient query! Retrying with new max count of " 3003 + mySearchProperties.getMaxResultsRequested(); 3004 firePerformanceWarning(myRequest, msg); 3005 } 3006 3007 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 3008 Integer offset = theOffset; 3009 if (myQueryList.isEmpty()) { 3010 // Capture times for Lucene/Elasticsearch queries as well 3011 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3012 3013 // setting offset to 0 to fetch all resource ids to guarantee 3014 // correct output result for everything operation during paging 3015 if (myParams.getEverythingMode() != null) { 3016 offset = 0; 3017 } 3018 3019 SearchQueryProperties properties = mySearchProperties.clone(); 3020 properties 3021 .setOffset(offset) 3022 .setMaxResultsRequested(theMaxResultsToFetch) 3023 .setDoCountOnlyFlag(false) 3024 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 3025 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 3026 } 3027 3028 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3029 3030 retrieveNextIteratorQuery(); 3031 3032 mySkipCount = 0; 3033 myNonSkipCount = 0; 3034 } 3035 3036 private void retrieveNextIteratorQuery() { 3037 close(); 3038 if (isNotEmpty(myQueryList)) { 3039 myResultsIterator = myQueryList.remove(0); 3040 myHasNextIteratorQuery = true; 3041 } else { 3042 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 3043 myHasNextIteratorQuery = false; 3044 } 3045 } 3046 3047 @Override 3048 public boolean hasNext() { 3049 if (myNext == null) { 3050 fetchNext(); 3051 } 3052 return !NO_MORE.equals(myNext); 3053 } 3054 3055 @Override 3056 public JpaPid next() { 3057 fetchNext(); 3058 JpaPid retVal = myNext; 3059 myNext = null; 3060 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 3061 return retVal; 3062 } 3063 3064 @Override 3065 public int getSkippedCount() { 3066 return mySkipCount; 3067 } 3068 3069 @Override 3070 public int getNonSkippedCount() { 3071 return myNonSkipCount; 3072 } 3073 3074 @Override 3075 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 3076 Collection<JpaPid> batch = new ArrayList<>(); 3077 while (this.hasNext() && batch.size() < theBatchSize) { 3078 batch.add(this.next()); 3079 } 3080 return batch; 3081 } 3082 3083 @Override 3084 public void close() { 3085 if (myResultsIterator != null) { 3086 myResultsIterator.close(); 3087 } 3088 myResultsIterator = null; 3089 } 3090 } 3091 3092 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 3093 // Only log at debug level since these messages aren't considered important enough 3094 // that we should be 
		// cluttering the system log, but they are important to the specific query being
		// executed, so we surface them at INFO level through the performance trace hook
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}

	private static ScrollableResults<?> toScrollableResults(Query theQuery) {
		org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
		return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
	}
}