
/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2025 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder;

import ca.uhn.fhir.context.ComboSearchParamType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dao.JpaPidFk;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CartesianProductUtil;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.criteria.Selection;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.NO_MORE;
import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.stripStart;

/**
 * The SearchBuilder is responsible for actually forming the SQL query that handles
 * searches for resources
 */
public class SearchBuilder implements ISearchBuilder<JpaPid> {

	/**
	 * See loadResourcesByPid
	 * for an explanation of why we use the constant 800
	 */
	// NB: keep public
	@Deprecated
	public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;

	public static final String RESOURCE_ID_ALIAS = "resource_id";
	public static final String PARTITION_ID_ALIAS = "partition_id";
	public static final String RESOURCE_VERSION_ALIAS = "resource_version";
	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);

	private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
	private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
	private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
	private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
	private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
	private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
	private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
	public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
	public static Integer myMaxPageSizeForTests = null;
	protected final IInterceptorBroadcaster myInterceptorBroadcaster;
	protected final IResourceTagDao myResourceTagDao;
	private String myResourceName;
	private final Class<? extends IBaseResource> myResourceType;
	private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
	private final SqlObjectFactory mySqlBuilderFactory;
	private final HibernatePropertiesProvider myDialectProvider;
	private final ISearchParamRegistry mySearchParamRegistry;
	private final PartitionSettings myPartitionSettings;
	private final DaoRegistry myDaoRegistry;
	private final FhirContext myContext;
	private final IIdHelperService<JpaPid> myIdHelperService;
	private final JpaStorageSettings myStorageSettings;
	private final SearchQueryProperties mySearchProperties;
	private final IResourceHistoryTableDao myResourceHistoryTableDao;
	private final IJpaStorageResourceParser myJpaStorageResourceParser;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

	private CriteriaBuilder myCriteriaBuilder;
	private SearchParameterMap myParams;
	private String mySearchUuid;
	private int myFetchSize;

	private boolean myRequiresTotal;

	/**
	 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
	 */
	private Set<JpaPid> myPidSet;

	private boolean myHasNextIteratorQuery = false;
	private RequestPartitionId myRequestPartitionId;

	private IFulltextSearchSvc myFulltextSearchSvc;

	@Autowired(required = false)
	public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) {
		myFulltextSearchSvc = theFulltextSearchSvc;
	}

	@Autowired(required = false)
	private IElasticsearchSvc myIElasticsearchSvc;

	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;

	@Autowired
	private IRequestPartitionHelperSvc myPartitionHelperSvc;

	/**
	 * Constructor
	 */
	@SuppressWarnings({"rawtypes", "unchecked"})
	public SearchBuilder(
			String theResourceName,
			JpaStorageSettings theStorageSettings,
			HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider,
			ISearchParamRegistry theSearchParamRegistry,
			PartitionSettings thePartitionSettings,
			IInterceptorBroadcaster theInterceptorBroadcaster,
			IResourceTagDao theResourceTagDao,
			DaoRegistry theDaoRegistry,
			FhirContext theContext,
			IIdHelperService theIdHelperService,
			IResourceHistoryTableDao theResourceHistoryTagDao,
			IJpaStorageResourceParser theIJpaStorageResourceParser,
			Class<? extends IBaseResource> theResourceType) {
		myResourceName = theResourceName;
		myResourceType = theResourceType;
		myStorageSettings = theStorageSettings;

		myEntityManagerFactory = theEntityManagerFactory;
		mySqlBuilderFactory = theSqlBuilderFactory;
		myDialectProvider = theDialectProvider;
		mySearchParamRegistry = theSearchParamRegistry;
		myPartitionSettings = thePartitionSettings;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
		myResourceTagDao = theResourceTagDao;
		myDaoRegistry = theDaoRegistry;
		myContext = theContext;
		myIdHelperService = theIdHelperService;
		myResourceHistoryTableDao = theResourceHistoryTagDao;
		myJpaStorageResourceParser = theIJpaStorageResourceParser;

		mySearchProperties = new SearchQueryProperties();
	}

	@VisibleForTesting
	void setResourceName(String theName) {
		myResourceName = theName;
	}

	@Override
	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
		mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
	}

	@Override
	public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
		mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
	}

	@Override
	public void setRequireTotal(boolean theRequireTotal) {
		myRequiresTotal = theRequireTotal;
	}

	@Override
	public boolean requiresTotal() {
		return myRequiresTotal;
	}

	private void searchForIdsWithAndOr(
			SearchQueryBuilder theSearchSqlBuilder,
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest) {
		myParams = theParams;
		mySearchProperties.setSortSpec(myParams.getSort());

		// Remove any empty parameters
		theParams.clean();

		// For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance
		if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
			Dstu3DistanceHelper.setNearDistance(myResourceType, theParams);
		}

		// Attempt to lookup via composite unique key.
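		// Illustrative example: a combo unique SP covering, say, Patient?identifier=X&birthdate=Y can be
		// satisfied with a single lookup against the combo index rather than joining each parameter's
		// individual index table.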
		if (isCompositeUniqueSpCandidate()) {
			attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
		}

		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream()
				.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
				.filter(t -> !t.equals(Constants.PARAM_TAG))
				.collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {
			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by
				// Elasticsearch
				continue;
			}
			List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
			Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
					.setParamName(nextParamName)
					.setAndOrParams(andOrParams)
					.setRequest(theRequest)
					.setRequestPartitionId(myRequestPartitionId)
					.setIncludeDeleted(myParams.getSearchIncludeDeletedMode()));
			if (predicate != null) {
				theSearchSqlBuilder.addPredicate(predicate);
			}
		}
	}

	/**
	 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the
	 * parameters all have no modifiers.
	 */
	private boolean isCompositeUniqueSpCandidate() {
		return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null;
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public Long createCountQuery(
			SearchParameterMap theParams,
			String theSearchUuid,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {

		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchUuid, theRequestPartitionId);

		if (checkUseHibernateSearch()) {
			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
		}

		SearchQueryProperties properties = mySearchProperties.clone();
		properties.setDoCountOnlyFlag(true);
		properties.setSortSpec(null); // counts don't require sorts
		properties.setMaxResultsRequested(null);
		properties.setOffset(null);
		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
		if (queries.isEmpty()) {
			return 0L;
		} else {
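			// In count-only mode the generated SQL is an aggregate, so the single JpaPid returned by the
			// executor carries the COUNT(*) value in its id field rather than a real resource PID
			// (inferred from the fact that we return jpaPid.getId() as the total).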
			JpaPid jpaPid = queries.get(0).next();
			return jpaPid.getId();
		}
	}

	/**
	 * @param thePidSet May be null
	 */
	@Override
	public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) {
		myPidSet = new HashSet<>(thePidSet);
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public IResultIterator<JpaPid> createQuery(
			SearchParameterMap theParams,
			SearchRuntimeDetails theSearchRuntimeDetails,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {
		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);

		if (myPidSet == null) {
			myPidSet = new HashSet<>();
		}

		return new QueryIterator(theSearchRuntimeDetails, theRequest);
	}

	private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
		myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
		// we mutate the params. Make a private copy.
		myParams = theParams.clone();
		mySearchProperties.setSortSpec(myParams.getSort());
		mySearchUuid = theSearchUuid;
		myRequestPartitionId = theRequestPartitionId;
	}

	/**
	 * The query created can be either a count query or the
	 * actual query.
	 * This is why it takes a SearchQueryProperties object
	 * (and doesn't use the local version of it).
	 * The properties may differ slightly for whichever
	 * query this is.
	 */
	private List<ISearchQueryExecutor> createQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			SearchRuntimeDetails theSearchRuntimeDetails) {
		ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

		if (checkUseHibernateSearch()) {
			// we're going to run at least part of the search against the Fulltext service.

			// Ugh - we have two different return types for now
			ISearchQueryExecutor fulltextExecutor = null;
			List<JpaPid> fulltextMatchIds = null;
			int resultCount = 0;
			if (myParams.isLastN()) {
				fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested());
				resultCount = fulltextMatchIds.size();
			} else if (myParams.getEverythingMode() != null) {
				fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
				resultCount = fulltextMatchIds.size();
			} else {
				// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
				// enabled SP indexing).
				// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
				// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
				// extra work in Elastic.
				// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

				// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
				// a page.
				fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
			}

			if (fulltextExecutor == null) {
				fulltextExecutor =
						SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
			}

			if (theSearchRuntimeDetails != null) {
				theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
				IInterceptorBroadcaster compositeBroadcaster =
						CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
				if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
					HookParams params = new HookParams()
							.add(RequestDetails.class, theRequest)
							.addIfMatchesType(ServletRequestDetails.class, theRequest)
							.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
					compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
				}
			}

			// can we skip the database entirely and return the pid list from here?
			boolean canSkipDatabase =
					// if we processed an AND clause, and it returned nothing, then nothing can match.
					!fulltextExecutor.hasNext()
							||
							// Our hibernate search query doesn't respect partitions yet
							(!myPartitionSettings.isPartitioningEnabled()
									&&
									// were there AND terms left? Then we still need the db.
									theParams.isEmpty()
									&&
									// not every param is a param. :-(
									theParams.getNearDistanceParam() == null
									&&
									// todo MB don't we support _lastUpdated and _offset now?
									theParams.getLastUpdated() == null
									&& theParams.getEverythingMode() == null
									&& theParams.getOffset() == null);

			if (canSkipDatabase) {
				ourLog.trace("Query finished after HSearch. Skip db query phase");
				if (theSearchProperties.hasMaxResultsRequested()) {
					fulltextExecutor = SearchQueryExecutors.limited(
							fulltextExecutor, theSearchProperties.getMaxResultsRequested());
				}
				queries.add(fulltextExecutor);
			} else {
				ourLog.trace("Query needs db after HSearch. Chunking.");
				// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
				// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
				QueryChunker.chunk(
						fulltextExecutor,
						SearchBuilder.getMaximumPageSize(),
						// for each list of (SearchBuilder.getMaximumPageSize())
						// we create a chunked query and add it to 'queries'
						t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
			}
		} else {
			// do everything in the database.
			createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
		}

		return queries;
	}

	/**
	 * Check to see if query should use Hibernate Search, and error if the query can't continue.
	 *
	 * @return true if the query should first be processed by Hibernate Search
	 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text
	 */
	private boolean checkUseHibernateSearch() {
		boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled();

		if (!fulltextEnabled) {
			failIfUsed(Constants.PARAM_TEXT);
			failIfUsed(Constants.PARAM_CONTENT);
		} else {
			for (SortSpec sortSpec : myParams.getAllChainsInOrder()) {
				final String paramName = sortSpec.getParamName();
				if (paramName.contains(".")) {
					failIfUsedWithChainedSort(Constants.PARAM_TEXT);
					failIfUsedWithChainedSort(Constants.PARAM_CONTENT);
				}
			}
		}

		// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we
		// can.
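		// i.e. use fulltext only when it can handle every remaining parameter and sort term; contained-mode
		// searches and anything the fulltext service can't support fall through to the SQL path.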
		return fulltextEnabled
				&& myParams != null
				&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
				&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
				&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
	}

	private void failIfUsed(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(1192)
					+ "Fulltext search is not enabled on this service, can not process parameter: " + theParamName);
		}
	}

	private void failIfUsedWithChainedSort(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(2524)
					+ "Fulltext search combined with chained sorts are not supported, can not process parameter: "
					+ theParamName);
		}
	}

	private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) {
		// Can we use our hibernate search generated index on resource to support lastN?:
		if (myStorageSettings.isHibernateSearchIndexSearchParams()) {
			if (myFulltextSearchSvc == null) {
				throw new InvalidRequestException(Msg.code(2027)
						+ "LastN operation is not enabled on this service, can not process this request");
			}
			return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
					.map(t -> (JpaPid) t)
					.collect(Collectors.toList());
		} else {
			throw new InvalidRequestException(
					Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
		}
	}

	private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) {
		JpaPid pid = null;
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam =
					myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			if (idParam instanceof TokenParam idParm) {
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}

			pid = myIdHelperService
					.resolveResourceIdentity(
							myRequestPartitionId,
							myResourceName,
							idParamValue,
							ResolveIdentityMode.includeDeleted().cacheOk())
					.getPersistentId();
		}
		return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
	}

	private void doCreateChunkedQueries(
			SearchParameterMap theParams,
			List<JpaPid> thePids,
			SearchQueryProperties theSearchQueryProperties,
			RequestDetails theRequest,
			ArrayList<ISearchQueryExecutor> theQueries) {

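		// normalizeIdListForInClause pads the PID list out to a standard size with a placeholder PID that can
		// never exist, so the database sees only a few distinct IN-clause shapes and can reuse cached
		// execution plans - the same trick described in doLoadPids().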
		if (thePids.size() < getMaximumPageSize()) {
			thePids = normalizeIdListForInClause(thePids);
		}
		theSearchQueryProperties.setMaxResultsRequested(thePids.size());
		createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
	}

	/**
	 * Combs through the params for any _id parameters and extracts the PIDs for them
	 */
	private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
		// get all the IQueryParameterType objects
		// for _id -> these should all be StringParam values
		HashSet<IIdType> ids = new HashSet<>();
		List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
		for (List<IQueryParameterType> paramList : params) {
			for (IQueryParameterType param : paramList) {
				String id;
				if (param instanceof StringParam) {
					// we expect all _id values to be StringParams
					id = ((StringParam) param).getValue();
				} else if (param instanceof TokenParam) {
					id = ((TokenParam) param).getValue();
				} else {
					// we do not expect the _id parameter to be a non-string value
					throw new IllegalArgumentException(
							Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
				}

				IIdType idType = myContext.getVersion().newIdType();
				if (id.contains("/")) {
					idType.setValue(id);
				} else {
					idType.setValue(myResourceName + "/" + id);
				}
				ids.add(idType);
			}
		}

		// fetch our target Pids
		// this will throw if an id is not found
		Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
				myRequestPartitionId,
				new ArrayList<>(ids),
				ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

		// add the pids to targetPids
		for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
			theTargetPids.add(pid.getPersistentId());
		}
	}

	private void createChunkedQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		if (myParams.getEverythingMode() != null) {
			createChunkedQueryForEverythingSearch(
					theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
		} else {
			createChunkedQueryNormalSearch(
					theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
		}
	}

	private void createChunkedQueryNormalSearch(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				myResourceName,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchProperties.isDoCountOnlyFlag(),
				myResourceName == null || myResourceName.isBlank());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		if (theParams.keySet().size() > 1
				|| theParams.getSort() != null
				|| theParams.keySet().contains(Constants.PARAM_HAS)
				|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);
			}
		}

		/*
		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
		 * specific filters with ORs as their root from working around the natural resource type / deletion
		 * status / partition IDs built into queries.
		 */
		if (theParams.containsKey(Constants.PARAM_FILTER)) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Normal search
		// we will create a resourceTablePredicate if and only if we have an _id SP.
		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
		// partition ID predicate in that case.
		if (!sqlBuilder.haveAtLeastOnePredicate()) {
			Condition partitionIdPredicate;

			if (theParams.getSearchIncludeDeletedMode() != null) {
				partitionIdPredicate = sqlBuilder
						.getOrCreateResourceTablePredicateBuilder(true, theParams.getSearchIncludeDeletedMode())
						.createPartitionIdPredicate(myRequestPartitionId);
			} else {
				partitionIdPredicate = sqlBuilder
						.getOrCreateResourceTablePredicateBuilder()
						.createPartitionIdPredicate(myRequestPartitionId);
			}

			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		// Last updated
		addLastUpdatePredicate(sqlBuilder);

		/*
		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
		 * to something needed to guarantee correct results.
		 *
		 * Why do we need it? Suppose for example, a query like:
		 * Observation?category=foo,bar,baz
		 * And suppose you have many resources that have all 3 of these category codes. In this case
		 * the SQL query will probably return the same PIDs multiple times, and if this happens enough
		 * we may exhaust the query results without getting enough distinct results back. When that
		 * happens we re-run the query with a larger limit. Excluding results we already know about
		 * tries to ensure that we get new unique results.
		 *
		 * The challenge with that though is that lots of DBs have an issue with too many
		 * parameters in one query. So we only do this optimization if there aren't too
		 * many results.
		 */
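		// The 900 below leaves headroom under the roughly 1000 bind parameters that several databases/drivers
		// accept in a single statement (the same limit the chunking above works around).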
		if (myHasNextIteratorQuery) {
			if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) {
				sqlBuilder.excludeResourceIdsPredicate(myPidSet);
			}
		}

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * OR
		 * if the MaxResultsToFetch is null, we are requesting "everything",
		 * so we'll let the db do the deduplication (instead of in-memory)
		 */
		if (theSearchProperties.isDeduplicateInDatabase()) {
			queryStack3.addGrouping();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Sort
		 *
		 * If we have a sort, we wrap the criteria search (the search that actually
		 * finds the appropriate resources) in an outer search which is then sorted
		 */
		if (theSearchProperties.hasSort()) {
			assert !theSearchProperties.isDoCountOnlyFlag();

			createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void executeSearch(
			SearchQueryProperties theProperties,
			List<ISearchQueryExecutor> theSearchQueryExecutors,
			SearchQueryBuilder sqlBuilder) {
		GeneratedSql generatedSql =
				sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
		if (!generatedSql.isMatchNothing()) {
			SearchQueryExecutor executor =
					mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
			theSearchQueryExecutors.add(executor);
		}
	}

	private void createChunkedQueryForEverythingSearch(
			RequestDetails theRequest,
			SearchParameterMap theParams,
			SearchQueryProperties theSearchQueryProperties,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {

		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				null,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchQueryProperties.isDoCountOnlyFlag(),
				false);

		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

		Set<JpaPid> targetPids = new HashSet<>();
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {

			extractTargetPidsFromIdParams(targetPids);

			// add the target pids to our executors as the first
			// results iterator to go through
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
		} else {
			// For Everything queries, we make the query root by the ResourceLink table, since this query
			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
			// the one problem with this approach is that it doesn't catch Patients that have absolutely
			// nothing linked to them. So we do one additional query to make sure we catch those too.
			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
					myContext,
					myStorageSettings,
					myPartitionSettings,
					myRequestPartitionId,
					myResourceName,
					mySqlBuilderFactory,
					myDialectProvider,
					theSearchQueryProperties.isDoCountOnlyFlag(),
					false);
			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
					theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
			String sql = allTargetsSql.getSql();
			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

			List<JpaPid> output =
					jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args);

			// we add a search executor to fetch unlinked patients first
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
		}

		List<String> typeSourceResources = new ArrayList<>();
		if (myParams.get(Constants.PARAM_TYPE) != null) {
			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
		}

		queryStack3.addPredicateEverythingOperation(
				myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY
		 * ORDER BY is required to make sure we return unique results for each page
		 */
		if (theSearchQueryProperties.hasOffset()) {
			queryStack3.addGrouping();
			queryStack3.addOrdering();
			queryStack3.setUseAggregate(true);
		}

		if (myParams.getEverythingMode().isPatient()) {
			/*
			 * NB: patient-compartment limitation
			 *
			 * We are manually excluding Group and List resources
			 * from the patient-compartment for $everything operations on Patient type/instance.
			 *
			 * See issue: https://github.com/hapifhir/hapi-fhir/issues/7118
			 */
			sqlBuilder.excludeResourceTypesPredicate(
					SearchParameterUtil.RESOURCE_TYPES_TO_SP_TO_OMIT_FROM_PATIENT_COMPARTMENT.keySet());
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
		if (thePidList != null && !thePidList.isEmpty()) {
			theSqlBuilder.addResourceIdsPredicate(thePidList);
		}
	}

	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
		DateRangeParam lu = myParams.getLastUpdated();
		if (lu != null && !lu.isEmpty()) {
			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
			theSqlBuilder.addPredicate(lastUpdatedPredicates);
		}
	}

	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
		jdbcTemplate.setFetchSize(myFetchSize);
		if (theMaximumResults != null) {
			jdbcTemplate.setMaxRows(theMaximumResults);
		}
		return jdbcTemplate;
	}

	private Collection<String> extractTypeSourceResourcesFromParams() {

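		// e.g. _type=Patient,Observation (possibly repeated as several _type parameters) is flattened,
		// trimmed, validated against the known resource types, and de-duplicated below.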
		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);

		// first off, let's flatten the list of list
		List<IQueryParameterType> iQueryParameterTypesList =
				listOfList.stream().flatMap(List::stream).toList();

		// then, extract all elements of each CSV into one big list
		List<String> resourceTypes = iQueryParameterTypesList.stream()
				.map(param -> ((StringParam) param).getValue())
				.map(csvString -> List.of(csvString.split(",")))
				.flatMap(List::stream)
				.toList();

		Set<String> knownResourceTypes = myContext.getResourceTypes();

		// remove leading/trailing whitespaces if any and remove duplicates
		Set<String> retVal = new HashSet<>();

		for (String type : resourceTypes) {
			String trimmed = type.trim();
			if (!knownResourceTypes.contains(trimmed)) {
				throw new ResourceNotFoundException(
						Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter.");
			}
			retVal.add(trimmed);
		}

		return retVal;
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myStorageSettings.isIndexOnContainedResources()
				&& theParams.values().stream()
						.flatMap(Collection::stream)
						.flatMap(Collection::stream)
						.anyMatch(ReferenceParam.class::isInstance);
	}

	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
		if (theSort == null || isBlank(theSort.getParamName())) {
			return;
		}

		boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC);

		if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourceId(ascending);

		} else if (Constants.PARAM_PID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourcePID(ascending);

		} else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {

			theQueryStack.addSortOnLastUpdated(ascending);

		} else {
			RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

			/*
			 * If we have a sort like _sort=subject.name and we have an
			 * uplifted refchain for that combination we can do it more efficiently
			 * by using the index associated with the uplifted refchain. In this case,
			 * we need to find the actual target search parameter (corresponding
			 * to "name" in this example) so that we know what datatype it is.
			 */
			String paramName = theSort.getParamName();
			if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
				String[] chains = StringUtils.split(paramName, '.');
				if (chains.length == 2) {

					// Given: Encounter?_sort=Patient:subject.name
					String referenceParam = chains[0]; // subject
					String referenceParamTargetType = null; // Patient
					String targetParam = chains[1]; // name

					int colonIdx = referenceParam.indexOf(':');
					if (colonIdx > -1) {
						referenceParamTargetType = referenceParam.substring(0, colonIdx);
						referenceParam = referenceParam.substring(colonIdx + 1);
					}
					RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
							myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
					if (outerParam == null) {
						throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
					} else if (outerParam.hasUpliftRefchain(targetParam)) {
						for (String nextTargetType : outerParam.getTargets()) {
							if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
								continue;
							}
							RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
									nextTargetType,
									targetParam,
									ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
							if (innerParam != null) {
								param = innerParam;
								break;
							}
						}
					}
				}
			}

			int colonIdx = paramName.indexOf(':');
			String referenceTargetType = null;
			if (colonIdx > -1) {
				referenceTargetType = paramName.substring(0, colonIdx);
				paramName = paramName.substring(colonIdx + 1);
			}

			int dotIdx = paramName.indexOf('.');
			String chainName = null;
			if (param == null && dotIdx > -1) {
				chainName = paramName.substring(dotIdx + 1);
				paramName = paramName.substring(0, dotIdx);
				if (chainName.contains(".")) {
					String msg = myContext
							.getLocalizer()
							.getMessageSanitized(
									BaseStorageDao.class,
									"invalidSortParameterTooManyChains",
									paramName + "." + chainName);
					throw new InvalidRequestException(Msg.code(2286) + msg);
				}
			}

			if (param == null) {
				param = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
			}

			if (param == null) {
				throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
			}

			// param will never be null here (the above line throws if it does)
			// this is just to prevent the warning
			assert param != null;
			if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
				throw new InvalidRequestException(
						Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
			}

			switch (param.getParamType()) {
				case STRING:
					theQueryStack.addSortOnString(myResourceName, paramName, ascending);
					break;
				case DATE:
					theQueryStack.addSortOnDate(myResourceName, paramName, ascending);
					break;
				case REFERENCE:
					theQueryStack.addSortOnResourceLink(
							myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
					break;
				case TOKEN:
					theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
					break;
				case NUMBER:
					theQueryStack.addSortOnNumber(myResourceName, paramName, ascending);
					break;
				case URI:
					theQueryStack.addSortOnUri(myResourceName, paramName, ascending);
					break;
				case QUANTITY:
					theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending);
					break;
				case COMPOSITE:
					List<RuntimeSearchParam> compositeList =
							JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param);
					if (compositeList == null) {
						throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName
								+ " is not defined by the resource " + myResourceName);
					}
					if (compositeList.size() != 2) {
						throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName
								+ " must have 2 composite types declared in parameter annotation, found "
								+ compositeList.size());
					}
					RuntimeSearchParam left = compositeList.get(0);
					RuntimeSearchParam right = compositeList.get(1);

					createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending);
					createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending);

					break;
				case SPECIAL:
					if (LOCATION_POSITION.equals(param.getPath())) {
						theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams);
						break;
					}
					throw new InvalidRequestException(
							Msg.code(2306) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);

				case HAS:
				default:
					throw new InvalidRequestException(
							Msg.code(1197) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);
			}
		}

		// Recurse
		createSort(theQueryStack, theSort.getChain(), theParams);
	}

	private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
		Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
				theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
		String msg = myContext
				.getLocalizer()
				.getMessageSanitized(
						BaseStorageDao.class,
						"invalidSortParameter",
						theParamName,
						theResourceName,
						validSearchParameterNames);
		throw new InvalidRequestException(Msg.code(1194) + msg);
	}

	private void createCompositeSort(
			QueryStack theQueryStack,
			RestSearchParameterTypeEnum theParamType,
			String theParamName,
			boolean theAscending) {

		switch (theParamType) {
			case STRING:
				theQueryStack.addSortOnString(myResourceName, theParamName, theAscending);
				break;
			case DATE:
				theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending);
				break;
			case TOKEN:
				theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending);
				break;
			case QUANTITY:
				theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending);
				break;
			case NUMBER:
			case REFERENCE:
			case COMPOSITE:
			case URI:
			case HAS:
			case SPECIAL:
			default:
				throw new InvalidRequestException(
						Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType
								+ " on _sort=" + theParamName);
		}
	}

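	// thePosition maps each requested resource PID to its index in the caller's result list, so resources
	// loaded out of order from the database can be written back in the original search order.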
	private void doLoadPids(
			RequestDetails theRequest,
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			Map<Long, Integer> thePosition) {

		Map<JpaPid, Long> resourcePidToVersion = null;
		for (JpaPid next : thePids) {
			if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
				if (resourcePidToVersion == null) {
					resourcePidToVersion = new HashMap<>();
				}
				resourcePidToVersion.put(next, next.getVersion());
			}
		}

		List<JpaPid> versionlessPids = new ArrayList<>(thePids);
		int expectedCount = versionlessPids.size();
		if (versionlessPids.size() < getMaximumPageSize()) {
			/*
			 * This method adds a bunch of extra params to the end of the parameter list
			 * which are for a resource PID that will never exist (-1 / NO_MORE). We do this
			 * so that the database can rely on a cached execution plan since we're not
			 * generating a new SQL query for every possible number of resources.
			 */
			versionlessPids = normalizeIdListForInClause(versionlessPids);
		}

		// Load the resource bodies
		List<JpaPidFk> historyVersionPks = JpaPidFk.fromPids(versionlessPids);
		List<ResourceHistoryTable> resourceSearchViewList =
				myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(historyVersionPks);

		/*
		 * If we have specific versions to load, replace the history entries with the
		 * correct ones
		 *
		 * TODO: this could definitely be made more efficient, probably by not loading the wrong
		 * version entity first, and by batching the fetches. But this is a fairly infrequently
		 * used feature, and loading history entities by PK is a very efficient query so it's
		 * not the end of the world
		 */
		if (resourcePidToVersion != null) {
			for (int i = 0; i < resourceSearchViewList.size(); i++) {
				ResourceHistoryTable next = resourceSearchViewList.get(i);
				JpaPid resourceId = next.getPersistentId();
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
				if (version != null && !version.equals(next.getVersion())) {
					ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion(
							next.getResourceId().toFk(), version);
					resourceSearchViewList.set(i, replacement);
				}
			}
		}

		/*
		 * If we got fewer rows back than we expected, that means that one or more ResourceTable
		 * entities (HFJ_RESOURCE) have a RES_VER version which doesn't exist in the
		 * ResourceHistoryTable (HFJ_RES_VER) table. This should never happen under normal
		 * operation, but if someone manually deletes a row or otherwise ends up in a weird
		 * state it can happen. In that case, we do a manual process of figuring out what
		 * is the right version.
		 */
		if (resourceSearchViewList.size() != expectedCount) {

			Set<JpaPid> loadedPks = resourceSearchViewList.stream()
					.map(ResourceHistoryTable::getResourceId)
					.collect(Collectors.toSet());
			for (JpaPid nextWantedPid : versionlessPids) {
				if (!nextWantedPid.equals(NO_MORE) && !loadedPks.contains(nextWantedPid)) {
					Optional<ResourceHistoryTable> latestVersion = findLatestVersion(
							theRequest, nextWantedPid, myResourceHistoryTableDao, myInterceptorBroadcaster);
					latestVersion.ifPresent(resourceSearchViewList::add);
				}
			}
		}

		// -- preload all tags with tag definition if any
		Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);

		for (ResourceHistoryTable next : resourceSearchViewList) {
			if (next.getDeleted() != null) {
				continue;
			}

			Class<? extends IBaseResource> resourceType =
					myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();

			JpaPid resourceId = next.getPersistentId();

			if (resourcePidToVersion != null) {
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
			}

			IBaseResource resource;
			resource = myJpaStorageResourceParser.toResource(
					theRequest, resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation);
			if (resource == null) {
				ourLog.warn(
						"Unable to find resource {}/{}/_history/{} in database",
						next.getResourceType(),
						next.getIdDt().getIdPart(),
						next.getVersion());
				continue;
			}

			Integer index = thePosition.get(resourceId.getId());
			if (index == null) {
				ourLog.warn("Got back unexpected resource PID {}", resourceId);
				continue;
			}

			if (theIncludedPids.contains(resourceId)) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH);
			}

			// ensure there's enough space; "<=" because of 0-indexing
			while (theResourceListToPopulate.size() <= index) {
				theResourceListToPopulate.add(null);
			}
			theResourceListToPopulate.set(index, resource);
		}
	}

	@SuppressWarnings("OptionalIsPresent")
	@Nonnull
	public static Optional<ResourceHistoryTable> findLatestVersion(
			RequestDetails theRequest,
			JpaPid nextWantedPid,
			IResourceHistoryTableDao resourceHistoryTableDao,
			IInterceptorBroadcaster interceptorBroadcaster1) {
		assert nextWantedPid != null && !nextWantedPid.equals(NO_MORE);

		Optional<ResourceHistoryTable> latestVersion = resourceHistoryTableDao
				.findVersionsForResource(JpaConstants.SINGLE_RESULT, nextWantedPid.toFk())
				.findFirst();
		String warning;
		if (latestVersion.isPresent()) {
			warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid
					+ " specifies an unknown current version, returning version "
					+ latestVersion.get().getVersion()
					+ " instead. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data.";
		} else {
			warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid
					+ " specifies an unknown current version, and no versions of this resource exist. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data.";
		}

		IInterceptorBroadcaster interceptorBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(interceptorBroadcaster1, theRequest);
		logAndBoradcastWarning(theRequest, warning, interceptorBroadcaster);
		return latestVersion;
	}

	private static void logAndBoradcastWarning(
			RequestDetails theRequest, String warning, IInterceptorBroadcaster interceptorBroadcaster) {
		ourLog.warn(warning);

		if (interceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
			HookParams params = new HookParams();
			params.add(RequestDetails.class, theRequest);
			params.addIfMatchesType(ServletRequestDetails.class, theRequest);
			params.add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(warning));
			interceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
		}
	}

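	// Note: with INLINE tag storage the tags are serialized inside the stored resource body itself, so the
	// INLINE branch below has nothing extra to look up (hence the empty map).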
This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data.";
		}

		IInterceptorBroadcaster interceptorBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(interceptorBroadcaster1, theRequest);
		logAndBroadcastWarning(theRequest, warning, interceptorBroadcaster);
		return latestVersion;
	}

	private static void logAndBroadcastWarning(
			RequestDetails theRequest, String warning, IInterceptorBroadcaster interceptorBroadcaster) {
		ourLog.warn(warning);

		if (interceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) {
			HookParams params = new HookParams();
			params.add(RequestDetails.class, theRequest);
			params.addIfMatchesType(ServletRequestDetails.class, theRequest);
			params.add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(warning));
			interceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
		return switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED -> getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables);
			case INLINE -> Map.of();
		};
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theRequestDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		int index = 0;
		for (JpaPid next : thePids) {
			position.put(next.getId(), index++);
		}

		// Can we fast track this loading by checking elastic search?
		boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids);
		if (isUsingElasticSearch) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in the index, so we warn and fall back to a database search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some jdbc drivers can't handle long param lists.
		QueryChunker.chunk(
				thePids,
				t -> doLoadPids(
						theRequestDetails,
						t,
						theIncludedPids,
						theResourceListToPopulate,
						theForHistoryOperation,
						position));
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isHibernateSearchIndexSearchParams()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we load resources via the fulltext svc (Hibernate Search), or fall back to the
		// legacy Elasticsearch-only implementation that handles lastN?
		if (myStorageSettings.isHibernateSearchIndexSearchParams()
				&& myStorageSettings.isStoreResourceInHSearchIndex()) {
			List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());

			return myFulltextSearchSvc.getResources(pidList);
		} else if (!Objects.isNull(myParams) && myParams.isLastN()) {
			// legacy LastN implementation
			return myIElasticsearchSvc.getObservationResources(thePids);
		} else {
			return Collections.emptyList();
		}
	}

	/**
	 * This MUST return a mutable HashSet (not just any Set, and certainly not
	 * Collections.emptySet()), because we add to the returned collection later.
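	 * This overload simply packages its arguments into a {@link SearchBuilderLoadIncludesParameters}
	 * and delegates to {@link #loadIncludes(SearchBuilderLoadIncludesParameters)}.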
1587 * The JpaPid returned will have resource type populated. 1588 */ 1589 @Override 1590 public Set<JpaPid> loadIncludes( 1591 FhirContext theContext, 1592 EntityManager theEntityManager, 1593 Collection<JpaPid> theMatches, 1594 Collection<Include> theIncludes, 1595 boolean theReverseMode, 1596 DateRangeParam theLastUpdated, 1597 String theSearchIdOrDescription, 1598 RequestDetails theRequest, 1599 Integer theMaxCount) { 1600 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1601 parameters.setFhirContext(theContext); 1602 parameters.setEntityManager(theEntityManager); 1603 parameters.setMatches(theMatches); 1604 parameters.setIncludeFilters(theIncludes); 1605 parameters.setReverseMode(theReverseMode); 1606 parameters.setLastUpdated(theLastUpdated); 1607 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1608 parameters.setRequestDetails(theRequest); 1609 parameters.setMaxCount(theMaxCount); 1610 return loadIncludes(parameters); 1611 } 1612 1613 @Override 1614 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1615 Collection<JpaPid> matches = theParameters.getMatches(); 1616 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1617 boolean reverseMode = theParameters.isReverseMode(); 1618 EntityManager entityManager = theParameters.getEntityManager(); 1619 Integer maxCount = theParameters.getMaxCount(); 1620 FhirContext fhirContext = theParameters.getFhirContext(); 1621 RequestDetails request = theParameters.getRequestDetails(); 1622 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1623 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1624 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1625 IInterceptorBroadcaster compositeBroadcaster = 1626 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1627 1628 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1629 CurrentThreadCaptureQueriesListener.startCapturing(); 1630 } 1631 if (matches.isEmpty()) { 1632 return new HashSet<>(); 1633 } 1634 if (currentIncludes == null || currentIncludes.isEmpty()) { 1635 return new HashSet<>(); 1636 } 1637 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1638 String searchPartitionIdFieldName = 1639 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1640 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1641 String findPartitionIdFieldName = 1642 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1643 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1644 String findVersionFieldName = null; 1645 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1646 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1647 } 1648 1649 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1650 HashSet<JpaPid> allAdded = new HashSet<>(); 1651 HashSet<JpaPid> original = new HashSet<>(matches); 1652 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1653 1654 int roundCounts = 0; 1655 StopWatch w = new StopWatch(); 1656 1657 boolean addedSomeThisRound; 1658 do { 1659 roundCounts++; 1660 1661 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1662 1663 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1664 Include nextInclude = iter.next(); 1665 if (!nextInclude.isRecurse()) { 1666 iter.remove(); 1667 } 1668 1669 // Account for _include=* 1670 boolean matchAll = "*".equals(nextInclude.getValue()); 1671 1672 // Account for _include=[resourceType]:* 1673 String wantResourceType = null; 1674 if (!matchAll) { 1675 if ("*".equals(nextInclude.getParamName())) { 1676 wantResourceType = nextInclude.getParamType(); 1677 matchAll = true; 1678 } 1679 } 1680 1681 if (matchAll) { 1682 loadIncludesMatchAll( 1683 findPidFieldName, 1684 findPartitionIdFieldName, 1685 findResourceTypeFieldName, 1686 findVersionFieldName, 1687 searchPidFieldName, 1688 searchPartitionIdFieldName, 1689 wantResourceType, 1690 reverseMode, 1691 hasDesiredResourceTypes, 1692 nextRoundMatches, 1693 entityManager, 1694 maxCount, 1695 desiredResourceTypes, 1696 pidsToInclude, 1697 request); 1698 } else { 1699 loadIncludesMatchSpecific( 1700 nextInclude, 1701 fhirContext, 1702 findPidFieldName, 1703 findPartitionIdFieldName, 1704 findVersionFieldName, 1705 searchPidFieldName, 1706 reverseMode, 1707 nextRoundMatches, 1708 entityManager, 1709 maxCount, 1710 pidsToInclude, 1711 request); 1712 } 1713 } 1714 1715 nextRoundMatches.clear(); 1716 for (JpaPid next : pidsToInclude) { 1717 if (!original.contains(next) && !allAdded.contains(next)) { 1718 nextRoundMatches.add(next); 1719 } else { 1720 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1721 } 1722 } 1723 1724 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1725 1726 if (maxCount != null && allAdded.size() >= maxCount) { 1727 break; 1728 } 1729 1730 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1731 1732 allAdded.removeAll(original); 1733 1734 ourLog.info( 1735 "Loaded {} {} in {} rounds and {} ms for search {}", 1736 allAdded.size(), 1737 reverseMode ? 
"_revincludes" : "_includes", 1738 roundCounts, 1739 w.getMillisAndRestart(), 1740 searchIdOrDescription); 1741 1742 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1743 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1744 } 1745 1746 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1747 // This can be used to remove results from the search result details before 1748 // the user has a chance to know that they were in the results 1749 if (!allAdded.isEmpty()) { 1750 1751 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1752 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1753 JpaPreResourceAccessDetails accessDetails = 1754 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1755 HookParams params = new HookParams() 1756 .add(IPreResourceAccessDetails.class, accessDetails) 1757 .add(RequestDetails.class, request) 1758 .addIfMatchesType(ServletRequestDetails.class, request); 1759 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1760 1761 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1762 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1763 JpaPid value = includedPidList.remove(i); 1764 if (value != null) { 1765 allAdded.remove(value); 1766 } 1767 } 1768 } 1769 } 1770 } 1771 1772 return allAdded; 1773 } 1774 1775 private void loadIncludesMatchSpecific( 1776 Include nextInclude, 1777 FhirContext fhirContext, 1778 String findPidFieldName, 1779 String findPartitionFieldName, 1780 String findVersionFieldName, 1781 String searchPidFieldName, 1782 boolean reverseMode, 1783 List<JpaPid> nextRoundMatches, 1784 EntityManager entityManager, 1785 Integer maxCount, 1786 HashSet<JpaPid> pidsToInclude, 1787 RequestDetails theRequest) { 1788 List<String> paths; 1789 1790 // Start replace 1791 RuntimeSearchParam param; 1792 String resType = nextInclude.getParamType(); 1793 if (isBlank(resType)) { 1794 return; 1795 } 1796 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1797 if (def == null) { 1798 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1799 return; 1800 } 1801 1802 String paramName = nextInclude.getParamName(); 1803 if (isNotBlank(paramName)) { 1804 param = mySearchParamRegistry.getActiveSearchParam( 1805 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1806 } else { 1807 param = null; 1808 } 1809 if (param == null) { 1810 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1811 return; 1812 } 1813 1814 paths = param.getPathsSplitForResourceType(resType); 1815 // end replace 1816 1817 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1818 1819 for (String nextPath : paths) { 1820 String findPidFieldSqlColumn = 1821 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1822 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1823 if (findVersionFieldName != null) { 1824 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1825 } 1826 if (myPartitionSettings.isDatabasePartitionMode()) { 1827 fieldsToLoad += ", r."; 1828 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1829 ? 
"partition_id" 1830 : "target_res_partition_id"; 1831 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1832 } 1833 1834 // Query for includes lookup has 2 cases 1835 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1836 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1837 // url in target_resource_url 1838 1839 // Case 1: 1840 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1841 1842 String searchPidFieldSqlColumn = 1843 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1844 StringBuilder localReferenceQuery = new StringBuilder(); 1845 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1846 localReferenceQuery.append(" FROM hfj_res_link r "); 1847 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1848 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1849 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1850 } 1851 localReferenceQuery 1852 .append(" AND r.") 1853 .append(searchPidFieldSqlColumn) 1854 .append(" IN (:target_pids) "); 1855 if (myPartitionSettings.isDatabasePartitionMode()) { 1856 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1857 ? "target_res_partition_id" 1858 : "partition_id"; 1859 localReferenceQuery 1860 .append("AND r.") 1861 .append(partitionFieldToSearch) 1862 .append(" = :search_partition_id "); 1863 } 1864 localReferenceQueryParams.put("src_path", nextPath); 1865 // we loop over target_pids later. 1866 if (targetResourceTypes != null) { 1867 if (targetResourceTypes.size() == 1) { 1868 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1869 localReferenceQueryParams.put( 1870 "target_resource_type", 1871 targetResourceTypes.iterator().next()); 1872 } else { 1873 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1874 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1875 } 1876 } 1877 1878 // Case 2: 1879 Pair<String, Map<String, Object>> canonicalQuery = 1880 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1881 1882 String sql = localReferenceQuery.toString(); 1883 if (canonicalQuery != null) { 1884 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1885 } 1886 1887 Map<String, Object> limitParams = new HashMap<>(); 1888 if (maxCount != null) { 1889 LinkedList<Object> bindVariables = new LinkedList<>(); 1890 sql = SearchQueryBuilder.applyLimitToSql( 1891 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1892 1893 // The dialect SQL limiter uses positional params, but we're using 1894 // named params here, so we need to replace the positional params 1895 // with equivalent named ones 1896 StringBuilder sb = new StringBuilder(); 1897 for (int i = 0; i < sql.length(); i++) { 1898 char nextChar = sql.charAt(i); 1899 if (nextChar == '?') { 1900 String nextName = "limit" + i; 1901 sb.append(':').append(nextName); 1902 limitParams.put(nextName, bindVariables.removeFirst()); 1903 } else { 1904 sb.append(nextChar); 1905 } 1906 } 1907 sql = sb.toString(); 1908 } 1909 1910 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1911 for (Collection<JpaPid> nextPartition : partitions) { 1912 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1913 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1914 if (myPartitionSettings.isDatabasePartitionMode()) { 1915 q.setParameter( 1916 "search_partition_id", 1917 nextPartition.iterator().next().getPartitionId()); 1918 } 1919 localReferenceQueryParams.forEach(q::setParameter); 1920 if (canonicalQuery != null) { 1921 canonicalQuery.getRight().forEach(q::setParameter); 1922 } 1923 limitParams.forEach(q::setParameter); 1924 1925 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1926 Tuple result; 1927 while (iter.hasNext()) { 1928 result = iter.next(); 1929 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1930 Long resourceVersion = null; 1931 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1932 resourceVersion = 1933 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1934 } 1935 Integer partitionId = null; 1936 if (myPartitionSettings.isDatabasePartitionMode()) { 1937 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1938 } 1939 1940 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1941 pid.setPartitionId(partitionId); 1942 pidsToInclude.add(pid); 1943 } 1944 } 1945 // myEntityManager.clear(); 1946 } 1947 } 1948 } 1949 1950 private void loadIncludesMatchAll( 1951 String findPidFieldName, 1952 String findPartitionFieldName, 1953 String findResourceTypeFieldName, 1954 String findVersionFieldName, 1955 String searchPidFieldName, 1956 String searchPartitionFieldName, 1957 String wantResourceType, 1958 boolean reverseMode, 1959 boolean hasDesiredResourceTypes, 1960 List<JpaPid> nextRoundMatches, 1961 EntityManager entityManager, 1962 Integer maxCount, 1963 List<String> desiredResourceTypes, 1964 HashSet<JpaPid> pidsToInclude, 1965 RequestDetails request) { 1966 1967 record IncludesRecord( 1968 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1969 1970 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1971 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1972 Root<ResourceLink> root = query.from(ResourceLink.class); 1973 1974 List<Selection<?>> selectionList = new ArrayList<>(); 1975 selectionList.add(root.get(findPidFieldName)); 1976 selectionList.add(root.get(findResourceTypeFieldName)); 1977 selectionList.add(root.get("myTargetResourceUrl")); 1978 if (findVersionFieldName != null) { 1979 selectionList.add(root.get(findVersionFieldName)); 1980 } else { 1981 selectionList.add(cb.nullLiteral(Long.class)); 1982 } 1983 if (myPartitionSettings.isDatabasePartitionMode()) { 1984 selectionList.add(root.get(findPartitionFieldName)); 1985 } else { 1986 selectionList.add(cb.nullLiteral(Integer.class)); 1987 } 1988 query.multiselect(selectionList); 1989 1990 List<Predicate> predicates = new ArrayList<>(); 1991 1992 if (myPartitionSettings.isDatabasePartitionMode()) { 1993 predicates.add( 1994 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 1995 } 1996 1997 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 1998 1999 /* 2000 * We need to set the resource type in 2 cases only: 2001 * 1) we are in $everything mode 2002 * (where we only want to fetch specific resource types, regardless of what is 2003 * available to fetch) 2004 * 2) we are doing revincludes 2005 * 2006 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 2007 * should always be checking the source resource type on the resource link. We don't 2008 * actually index that column though by default, so in order to try and be efficient 2009 * we don't actually include it for includes (but we do for revincludes). This is 2010 * because for an include, it doesn't really make sense to include a different 2011 * resource type than the one you are searching on. 2012 */ 2013 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 2014 // because mySourceResourceType is not part of the HFJ_RES_LINK 2015 // index, this might not be the most optimal performance. 2016 // but it is for an $everything operation (and maybe we should update the index) 2017 predicates.add( 2018 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 2019 } else { 2020 wantResourceType = null; 2021 } 2022 2023 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 2024 // resources 2025 // (e.g. via Provenance, List, or Group) when in an $everything operation 2026 if (myParams != null 2027 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 2028 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 2029 predicates.add(cb.not(root.get("mySourceResourceType") 2030 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 2031 } 2032 2033 if (hasDesiredResourceTypes) { 2034 predicates.add( 2035 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 2036 } 2037 2038 query.where(cb.and(predicates.toArray(new Predicate[0]))); 2039 2040 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 2041 for (Collection<JpaPid> nextPartition : partitions) { 2042 2043 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 2044 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 2045 if (myPartitionSettings.isDatabasePartitionMode()) { 2046 q.setParameter( 2047 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 2048 } 2049 if (wantResourceType != null) { 2050 q.setParameter("want_resource_type", wantResourceType); 2051 } 2052 if (maxCount != null) { 2053 q.setMaxResults(maxCount); 2054 } 2055 if (hasDesiredResourceTypes) { 2056 q.setParameter("desired_target_resource_types", desiredResourceTypes); 2057 } 2058 2059 Set<String> canonicalUrls = null; 2060 2061 try (ScrollableResultsIterator<IncludesRecord> iter = 2062 new ScrollableResultsIterator<>(toScrollableResults(q))) { 2063 IncludesRecord nextRow; 2064 while (iter.hasNext()) { 2065 nextRow = iter.next(); 2066 if (nextRow == null) { 2067 // This can happen if there are outgoing references which are canonical or point to 2068 // other servers 2069 continue; 2070 } 2071 2072 Long version = nextRow.version; 2073 Long resourceId = nextRow.resourceId; 2074 String resourceType = nextRow.resourceType; 2075 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 2076 Integer partitionId = nextRow.partitionId; 2077 2078 if (resourceId != null) { 2079 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 2080 pid.setPartitionId(partitionId); 2081 pidsToInclude.add(pid); 2082 } else if (resourceCanonicalUrl != null) { 2083 if (canonicalUrls == null) { 2084 canonicalUrls = new HashSet<>(); 2085 } 2086 canonicalUrls.add(resourceCanonicalUrl); 2087 } 
				}
			}

			if (canonicalUrls != null) {
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		if (canonicalUrlTargets.isEmpty()) {
			return;
		}

		String message =
				"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
		firePerformanceWarning(theRequestDetails, message);

		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance trace hook: sends the SQL queries captured on the current
	 * thread to the raw SQL pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = nextInclude.getParamTargetType();
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
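			// (returning null means the include query will not restrict on target resource type)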
2180 targetResourceTypes = null; 2181 } 2182 return targetResourceTypes; 2183 } 2184 2185 @Nullable 2186 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2187 String theVersionFieldName, 2188 Set<String> theTargetResourceTypes, 2189 boolean theReverse, 2190 RequestDetails theRequest, 2191 RuntimeSearchParam theParam) { 2192 2193 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2194 2195 // If we know for sure that none of the paths involved in this SearchParameter could 2196 // be indexing a canonical 2197 if (Arrays.stream(searchParameterPaths) 2198 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2199 myContext, myResourceName, t, theReverse))) { 2200 return null; 2201 } 2202 2203 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2204 if (theVersionFieldName != null) { 2205 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2206 fieldsToLoadFromSpidxUriTable += ", NULL"; 2207 } 2208 2209 if (myPartitionSettings.isDatabasePartitionMode()) { 2210 if (theReverse) { 2211 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2212 } else { 2213 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2214 } 2215 } 2216 2217 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2218 // But sp_name isn't indexed, so we use hash_identity instead. 2219 CanonicalUrlTargets canonicalUrlTargets = 2220 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2221 if (canonicalUrlTargets.isEmpty()) { 2222 return null; 2223 } 2224 2225 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2226 StringBuilder canonicalUrlQuery = new StringBuilder(); 2227 canonicalUrlQuery 2228 .append("SELECT ") 2229 .append(fieldsToLoadFromSpidxUriTable) 2230 .append(' '); 2231 canonicalUrlQuery.append("FROM hfj_res_link r "); 2232 2233 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2234 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2235 if (myPartitionSettings.isDatabasePartitionMode()) { 2236 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2237 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2238 } 2239 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2240 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2241 canonicalUriQueryParams.put( 2242 "uri_identity_hash", 2243 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2244 } else { 2245 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2246 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2247 } 2248 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2249 canonicalUrlQuery.append(")"); 2250 2251 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2252 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2253 canonicalUrlQuery.append(" AND"); 2254 if (myPartitionSettings.isDatabasePartitionMode()) { 2255 if (theReverse) { 2256 canonicalUrlQuery.append(" rUri.partition_id"); 2257 } else { 2258 canonicalUrlQuery.append(" r.partition_id"); 2259 } 2260 canonicalUrlQuery.append(" = :search_partition_id"); 2261 canonicalUrlQuery.append(" AND"); 2262 } 2263 if (theReverse) { 2264 
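			// in _revinclude mode the already-matched resources are the canonical targets,
			// so the candidate pid filter applies to the URI index row's owning resource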
canonicalUrlQuery.append(" rUri.res_id"); 2265 } else { 2266 canonicalUrlQuery.append(" r.src_resource_id"); 2267 } 2268 canonicalUrlQuery.append(" IN (:target_pids)"); 2269 2270 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2271 } 2272 2273 @Nonnull 2274 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2275 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2276 Set<String> targetResourceTypes = theTargetResourceTypes; 2277 if (targetResourceTypes == null) { 2278 /* 2279 * If we don't have a list of valid target types, we need to figure out a list of all 2280 * possible target types in order to perform the search of the URI index table. This is 2281 * because the hash_identity column encodes the resource type, so we'll need a hash 2282 * value for each possible target type. 2283 */ 2284 targetResourceTypes = new HashSet<>(); 2285 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2286 if (theReverse) { 2287 // For reverse includes, it is really hard to figure out what types 2288 // are actually potentially pointing to the type we're searching for 2289 // in this context, so let's just assume it could be anything. 2290 targetResourceTypes = possibleTypes; 2291 } else { 2292 List<RuntimeSearchParam> params = mySearchParamRegistry 2293 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2294 .values() 2295 .stream() 2296 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2297 .toList(); 2298 for (var next : params) { 2299 2300 String paths = next.getPath(); 2301 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2302 2303 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2304 myContext, myResourceName, path, theReverse)) { 2305 continue; 2306 } 2307 2308 if (!next.getTargets().isEmpty()) { 2309 // For each reference parameter on the resource type we're searching for, 2310 // add all the potential target types to the list of possible target 2311 // resource types we can look up. 
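						// (only target types that actually have a registered DAO on this server are kept)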
2312 for (var nextTarget : next.getTargets()) { 2313 if (possibleTypes.contains(nextTarget)) { 2314 targetResourceTypes.add(nextTarget); 2315 } 2316 } 2317 } else { 2318 // If we have any references that don't define any target types, then 2319 // we need to assume that all enabled resource types are possible target 2320 // types 2321 targetResourceTypes.addAll(possibleTypes); 2322 break; 2323 } 2324 } 2325 } 2326 } 2327 } 2328 2329 if (targetResourceTypes.isEmpty()) { 2330 return new CanonicalUrlTargets(Set.of(), Set.of()); 2331 } 2332 2333 Set<Long> hashIdentityValues = new HashSet<>(); 2334 Set<Integer> partitionIds = new HashSet<>(); 2335 for (String type : targetResourceTypes) { 2336 2337 RequestPartitionId readPartition; 2338 if (myPartitionSettings.isPartitioningEnabled()) { 2339 readPartition = 2340 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2341 } else { 2342 readPartition = RequestPartitionId.defaultPartition(); 2343 } 2344 if (readPartition.hasPartitionIds()) { 2345 partitionIds.addAll(readPartition.getPartitionIds()); 2346 } 2347 2348 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2349 myPartitionSettings, readPartition, type, "url"); 2350 hashIdentityValues.add(hashIdentity); 2351 } 2352 2353 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2354 } 2355 2356 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2357 public boolean isEmpty() { 2358 return hashIdentityValues.isEmpty(); 2359 } 2360 } 2361 2362 /** 2363 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2364 * those pids where: 2365 * <ul> 2366 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2367 * <li>Each list only contains JpaPids with the same partition ID</li> 2368 * </ul> 2369 */ 2370 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2371 2372 if (theNextRoundMatches.size() <= theMaxLoad) { 2373 boolean allSamePartition = true; 2374 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2375 if (!Objects.equals( 2376 theNextRoundMatches.get(i - 1).getPartitionId(), 2377 theNextRoundMatches.get(i).getPartitionId())) { 2378 allSamePartition = false; 2379 break; 2380 } 2381 } 2382 if (allSamePartition) { 2383 return Collections.singletonList(theNextRoundMatches); 2384 } 2385 } 2386 2387 // Break into partitioned sublists 2388 ListMultimap<String, JpaPid> lists = 2389 MultimapBuilder.hashKeys().arrayListValues().build(); 2390 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2391 String partitionId = nextRoundMatch.getPartitionId() != null 2392 ? 
nextRoundMatch.getPartitionId().toString()
					: "";
			lists.put(partitionId, nextRoundMatch);
		}

		List<Collection<JpaPid>> retVal = new ArrayList<>();
		for (String key : lists.keySet()) {
			List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
			retVal.addAll(nextPartition);
		}

		// In unit test mode, we sort the results just for unit test predictability
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			retVal = retVal.stream()
					.map(t -> t.stream().sorted().collect(Collectors.toList()))
					.collect(Collectors.toList());
		}

		return retVal;
	}

	private void attemptComboUniqueSpProcessing(
			QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
		RuntimeSearchParam comboParam = null;
		List<String> comboParamNames = null;
		List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
				myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		if (!exactMatchParams.isEmpty()) {
			comboParam = exactMatchParams.get(0);
			comboParamNames = new ArrayList<>(theParams.keySet());
		}

		if (comboParam == null) {
			List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			for (RuntimeSearchParam nextCandidate : candidateComboParams) {
				List<String> nextCandidateParamNames =
						JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
								.map(RuntimeSearchParam::getName)
								.collect(Collectors.toList());
				if (theParams.keySet().containsAll(nextCandidateParamNames)) {
					comboParam = nextCandidate;
					comboParamNames = nextCandidateParamNames;
					break;
				}
			}
		}

		if (comboParam != null) {
			Collections.sort(comboParamNames);

			// Since we're going to remove elements below
			theParams.values().forEach(this::ensureSubListsAreWritable);

			/*
			 * Apply search against the combo param index in a loop:
			 *
			 * 1. First we check whether the actual parameter values in the
			 * parameter map are actually usable for searching against the combo
			 * param index. E.g. no search modifiers, date comparators, etc.,
			 * since these mean you can't use the combo index.
			 *
			 * 2. Apply and create the join SQL. We remove parameter values from
			 * the map as we apply them, so any parameter values remaining in the
			 * map after each loop haven't yet been factored into the SQL.
			 *
			 * The loop allows us to create multiple combo index joins if there
			 * are multiple AND expressions for the related parameters.
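			 *
			 * As an illustration (assuming a combo parameter covering [birthdate, family]
			 * on Patient), a query like Patient?family=Smith&birthdate=2020-01-01 is
			 * collapsed into a single index string of the form
			 * "Patient?birthdate=2020-01-01&family=...", with parameter names sorted,
			 * values URL-escaped, and string values normalized for NON_UNIQUE combos.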
2460 */ 2461 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2462 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2463 } 2464 } 2465 } 2466 2467 private void applyComboSearchParam( 2468 QueryStack theQueryStack, 2469 @Nonnull SearchParameterMap theParams, 2470 RequestDetails theRequest, 2471 List<String> theComboParamNames, 2472 RuntimeSearchParam theComboParam) { 2473 2474 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2475 for (String nextParamName : theComboParamNames) { 2476 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2477 inputs.add(nextValues); 2478 } 2479 2480 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2481 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2482 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2483 2484 StringBuilder searchStringBuilder = new StringBuilder(); 2485 searchStringBuilder.append(myResourceName); 2486 searchStringBuilder.append("?"); 2487 2488 boolean first = true; 2489 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2490 2491 String nextParamName = theComboParamNames.get(paramIndex); 2492 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2493 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2494 // As a result, we strip the prefix if present. 2495 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(), EQUAL.getValue()); 2496 2497 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2498 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2499 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2500 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2501 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2502 } 2503 } 2504 2505 if (first) { 2506 first = false; 2507 } else { 2508 searchStringBuilder.append('&'); 2509 } 2510 2511 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2512 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2513 2514 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2515 } 2516 2517 String indexString = searchStringBuilder.toString(); 2518 ourLog.debug( 2519 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2520 2521 indexStrings.add(indexString); 2522 } 2523 2524 // Just to make sure we're stable for tests 2525 indexStrings.sort(Comparator.naturalOrder()); 2526 2527 // Interceptor broadcast: JPA_PERFTRACE_INFO 2528 IInterceptorBroadcaster compositeBroadcaster = 2529 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2530 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2531 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2532 StorageProcessingMessage msg = new StorageProcessingMessage() 2533 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2534 + indexStringForLog); 2535 HookParams params = new HookParams() 2536 .add(RequestDetails.class, theRequest) 2537 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2538 .add(StorageProcessingMessage.class, msg); 2539 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2540 } 2541 2542 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2543 case UNIQUE: 2544 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2545 break; 2546 case NON_UNIQUE: 2547 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2548 break; 2549 } 2550 2551 // Remove any empty parameters remaining after this 2552 theParams.clean(); 2553 } 2554 2555 /** 2556 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2557 * searching against a combo param with the given parameter names. This might be {@literal false} if 2558 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2559 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2560 */ 2561 private boolean validateParamValuesAreValidForComboParam( 2562 RequestDetails theRequest, 2563 @Nonnull SearchParameterMap theParams, 2564 List<String> theComboParamNames, 2565 RuntimeSearchParam theComboParam) { 2566 boolean paramValuesAreValidForCombo = true; 2567 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2568 2569 for (String nextParamName : theComboParamNames) { 2570 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2571 2572 if (nextValues == null || nextValues.isEmpty()) { 2573 paramValuesAreValidForCombo = false; 2574 break; 2575 } 2576 2577 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2578 paramOrValues.add(nextAndValue); 2579 2580 for (IQueryParameterType nextOrValue : nextAndValue) { 2581 if (nextOrValue instanceof DateParam dateParam) { 2582 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2583 String message = "Search with params " + theComboParamNames 2584 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2585 + nextParamName + "'"; 2586 firePerformanceInfo(theRequest, message); 2587 paramValuesAreValidForCombo = false; 2588 break; 2589 } 2590 } 2591 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2592 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2593 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2594 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
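					// Any other prefix (gt, ge, lt, ne, ...) disqualifies the combo index,
					// and the parameter falls through to the regular search indexes.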
2595 if (prefix != null && prefix != EQUAL) { 2596 String message = "Search with params " + theComboParamNames 2597 + " is not a candidate for combo searching - Parameter '" + nextParamName 2598 + "' has prefix: '" 2599 + paramWithPrefix.getPrefix().getValue() + "'"; 2600 firePerformanceInfo(theRequest, message); 2601 paramValuesAreValidForCombo = false; 2602 break; 2603 } 2604 } 2605 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2606 String message = "Search with params " + theComboParamNames 2607 + " is not a candidate for combo searching - Parameter '" + nextParamName 2608 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2609 firePerformanceInfo(theRequest, message); 2610 paramValuesAreValidForCombo = false; 2611 break; 2612 } 2613 } 2614 2615 // Reference params are only eligible for using a composite index if they 2616 // are qualified 2617 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2618 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2619 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2620 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2621 if (isBlank(param.getResourceType())) { 2622 ourLog.debug( 2623 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2624 paramValuesAreValidForCombo = false; 2625 break; 2626 } 2627 } 2628 2629 // Date params are not eligible for using composite unique index 2630 // as index could contain date with different precision (e.g. DAY, SECOND) 2631 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2632 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2633 ourLog.debug( 2634 "Search with params {} is not a candidate for combo searching - " 2635 + "Unique combo search parameter '{}' has DATE type", 2636 theComboParamNames, 2637 nextParamName); 2638 paramValuesAreValidForCombo = false; 2639 break; 2640 } 2641 } 2642 2643 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2644 ourLog.debug( 2645 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2646 paramValuesAreValidForCombo = false; 2647 } 2648 2649 return paramValuesAreValidForCombo; 2650 } 2651 2652 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2653 for (int i = 0; i < theListOfLists.size(); i++) { 2654 List<T> oldSubList = theListOfLists.get(i); 2655 if (!(oldSubList instanceof ArrayList)) { 2656 List<T> newSubList = new ArrayList<>(oldSubList); 2657 theListOfLists.set(i, newSubList); 2658 } 2659 } 2660 } 2661 2662 @Override 2663 public void setFetchSize(int theFetchSize) { 2664 myFetchSize = theFetchSize; 2665 } 2666 2667 public SearchParameterMap getParams() { 2668 return myParams; 2669 } 2670 2671 public CriteriaBuilder getBuilder() { 2672 return myCriteriaBuilder; 2673 } 2674 2675 public Class<? 
extends IBaseResource> getResourceType() { 2676 return myResourceType; 2677 } 2678 2679 public String getResourceName() { 2680 return myResourceName; 2681 } 2682 2683 /** 2684 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2685 */ 2686 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2687 2688 private final RequestDetails myRequest; 2689 private final Set<JpaPid> myCurrentPids; 2690 private Iterator<JpaPid> myCurrentIterator; 2691 private JpaPid myNext; 2692 2693 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2694 myCurrentPids = new HashSet<>(thePidSet); 2695 myCurrentIterator = null; 2696 myRequest = theRequest; 2697 } 2698 2699 private void fetchNext() { 2700 while (myNext == null) { 2701 2702 if (myCurrentIterator == null) { 2703 Set<Include> includes = new HashSet<>(); 2704 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2705 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2706 for (IQueryParameterType type : typeList) { 2707 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken()); 2708 for (String resourceType : queryString.split(",")) { 2709 String rt = resourceType.trim(); 2710 if (isNotBlank(rt)) { 2711 includes.add(new Include(rt + ":*", true)); 2712 } 2713 } 2714 } 2715 } 2716 } 2717 if (includes.isEmpty()) { 2718 includes.add(new Include("*", true)); 2719 } 2720 Set<JpaPid> newPids = loadIncludes( 2721 myContext, 2722 myEntityManager, 2723 myCurrentPids, 2724 includes, 2725 false, 2726 getParams().getLastUpdated(), 2727 mySearchUuid, 2728 myRequest, 2729 null); 2730 myCurrentIterator = newPids.iterator(); 2731 } 2732 2733 if (myCurrentIterator.hasNext()) { 2734 myNext = myCurrentIterator.next(); 2735 } else { 2736 myNext = NO_MORE; 2737 } 2738 } 2739 } 2740 2741 @Override 2742 public boolean hasNext() { 2743 fetchNext(); 2744 return !NO_MORE.equals(myNext); 2745 } 2746 2747 @Override 2748 public JpaPid next() { 2749 fetchNext(); 2750 JpaPid retVal = myNext; 2751 myNext = null; 2752 return retVal; 2753 } 2754 } 2755 /** 2756 * Basic Query iterator, used to fetch the results of a query. 2757 */ 2758 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2759 2760 private final SearchRuntimeDetails mySearchRuntimeDetails; 2761 2762 private final RequestDetails myRequest; 2763 private final boolean myHaveRawSqlHooks; 2764 private final boolean myHavePerfTraceFoundIdHook; 2765 private final Integer myOffset; 2766 private final IInterceptorBroadcaster myCompositeBroadcaster; 2767 private boolean myFirst = true; 2768 private IncludesIterator myIncludesIterator; 2769 /** 2770 * The next JpaPid value of the next result in this query. 2771 * Will not be null if fetched using getNext() 2772 */ 2773 private JpaPid myNext; 2774 /** 2775 * The current query result iterator running sql and supplying PIDs 2776 * @see #myQueryList 2777 */ 2778 private ISearchQueryExecutor myResultsIterator; 2779 2780 private boolean myFetchIncludesForEverythingOperation; 2781 2782 /** 2783 * The count of resources skipped because they were seen in earlier results 2784 */ 2785 private int mySkipCount = 0; 2786 /** 2787 * The count of resources that are new in this search 2788 * (ie, not cached in previous searches) 2789 */ 2790 private int myNonSkipCount = 0; 2791 /** 2792 * The list of queries to use to find all results. 2793 * Normal JPA queries will normally have a single entry. 
2794 * Queries that involve Hibernate Search/Elasticsearch may have 2795 * multiple queries because of chunking. 2796 * The $everything operation also jams some extra results in. 2797 */ 2798 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2799 2800 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2801 mySearchRuntimeDetails = theSearchRuntimeDetails; 2802 myOffset = myParams.getOffset(); 2803 myRequest = theRequest; 2804 myCompositeBroadcaster = 2805 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2806 2807 // everything requires fetching recursively all related resources 2808 if (myParams.getEverythingMode() != null) { 2809 myFetchIncludesForEverythingOperation = true; 2810 } 2811 2812 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2813 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2814 } 2815 2816 private void fetchNext() { 2817 try { 2818 if (myHaveRawSqlHooks) { 2819 CurrentThreadCaptureQueriesListener.startCapturing(); 2820 } 2821 2822 // If we don't have a query yet, create one 2823 if (myResultsIterator == null) { 2824 if (!mySearchProperties.hasMaxResultsRequested()) { 2825 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2826 } 2827 2828 /* 2829 * assigns the results iterator 2830 * and populates the myQueryList. 2831 */ 2832 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2833 } 2834 2835 if (myNext == null) { 2836 // no next means we need a new query (if one is available) 2837 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2838 /* 2839 * Because we combine our DB searches with Lucene 2840 * sometimes we can have multiple results iterators 2841 * (with only some having data in them to extract). 2842 * 2843 * We'll iterate our results iterators until we 2844 * either run out of results iterators, or we 2845 * have one that actually has data in it. 2846 */ 2847 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2848 retrieveNextIteratorQuery(); 2849 } 2850 2851 if (!myResultsIterator.hasNext()) { 2852 // we couldn't find a results iterator; 2853 // we're done here 2854 break; 2855 } 2856 2857 JpaPid nextPid = myResultsIterator.next(); 2858 if (myHavePerfTraceFoundIdHook) { 2859 callPerformanceTracingHook(nextPid); 2860 } 2861 2862 if (nextPid != null) { 2863 if (!myPidSet.contains(nextPid)) { 2864 if (!mySearchProperties.isDeduplicateInDatabase()) { 2865 /* 2866 * We only add to the map if we aren't fetching "everything"; 2867 * otherwise, we let the de-duplication happen in the database 2868 * (see createChunkedQueryNormalSearch above), because it 2869 * saves memory that way. 
2870 */ 2871 myPidSet.add(nextPid); 2872 } 2873 if (doNotSkipNextPidForEverything()) { 2874 myNext = nextPid; 2875 myNonSkipCount++; 2876 break; 2877 } 2878 } else { 2879 mySkipCount++; 2880 } 2881 } 2882 2883 if (!myResultsIterator.hasNext()) { 2884 if (mySearchProperties.hasMaxResultsRequested() 2885 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2886 if (mySkipCount > 0 && myNonSkipCount == 0) { 2887 sendProcessingMsgAndFirePerformanceHook(); 2888 // need the next iterator; increase the maxsize 2889 // (we should always do this) 2890 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2891 mySearchProperties.setMaxResultsRequested(maxResults); 2892 2893 if (!mySearchProperties.isDeduplicateInDatabase()) { 2894 // if we're not using the database to deduplicate 2895 // we should recheck our memory usage 2896 // the prefetch size check is future proofing 2897 int prefetchSize = myStorageSettings 2898 .getSearchPreFetchThresholds() 2899 .size(); 2900 if (prefetchSize > 0) { 2901 if (myStorageSettings 2902 .getSearchPreFetchThresholds() 2903 .get(prefetchSize - 1) 2904 < mySearchProperties.getMaxResultsRequested()) { 2905 mySearchProperties.setDeduplicateInDatabase(true); 2906 } 2907 } 2908 } 2909 2910 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2911 } 2912 } 2913 } 2914 } 2915 } 2916 2917 if (myNext == null) { 2918 // if we got here, it means the current JpaPid has already been processed, 2919 // and we will decide (here) if we need to fetch related resources recursively 2920 if (myFetchIncludesForEverythingOperation) { 2921 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2922 myFetchIncludesForEverythingOperation = false; 2923 } 2924 if (myIncludesIterator != null) { 2925 while (myIncludesIterator.hasNext()) { 2926 JpaPid next = myIncludesIterator.next(); 2927 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2928 myNext = next; 2929 break; 2930 } 2931 } 2932 if (myNext == null) { 2933 myNext = NO_MORE; 2934 } 2935 } else { 2936 myNext = NO_MORE; 2937 } 2938 } 2939 2940 if (!mySearchProperties.hasMaxResultsRequested()) { 2941 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2942 } else { 2943 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2944 } 2945 2946 } finally { 2947 // search finished - fire hooks 2948 if (myHaveRawSqlHooks) { 2949 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2950 } 2951 } 2952 2953 if (myFirst) { 2954 HookParams params = new HookParams() 2955 .add(RequestDetails.class, myRequest) 2956 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2957 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2958 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2959 myFirst = false; 2960 } 2961 2962 if (NO_MORE.equals(myNext)) { 2963 HookParams params = new HookParams() 2964 .add(RequestDetails.class, myRequest) 2965 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2966 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2967 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2968 } 2969 } 2970 2971 private Integer calculateMaxResultsToFetch() { 2972 if (myParams.getLoadSynchronousUpTo() != null) { 2973 return myParams.getLoadSynchronousUpTo(); 2974 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2975 return myParams.getEverythingMode() != null 2976 ? 
myParams.getOffset() + myParams.getCount() 2977 : myParams.getCount(); 2978 } else { 2979 return myStorageSettings.getFetchSizeDefaultMaximum(); 2980 } 2981 } 2982 2983 private boolean doNotSkipNextPidForEverything() { 2984 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2985 } 2986 2987 private void callPerformanceTracingHook(JpaPid theNextPid) { 2988 HookParams params = new HookParams() 2989 .add(Integer.class, System.identityHashCode(this)) 2990 .add(Object.class, theNextPid); 2991 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2992 } 2993 2994 private void sendProcessingMsgAndFirePerformanceHook() { 2995 String msg = "Pass completed with no matching results seeking rows " 2996 + myPidSet.size() + "-" + mySkipCount 2997 + ". This indicates an inefficient query! Retrying with new max count of " 2998 + mySearchProperties.getMaxResultsRequested(); 2999 firePerformanceWarning(myRequest, msg); 3000 } 3001 3002 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 3003 Integer offset = theOffset; 3004 if (myQueryList.isEmpty()) { 3005 // Capture times for Lucene/Elasticsearch queries as well 3006 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3007 3008 // setting offset to 0 to fetch all resource ids to guarantee 3009 // correct output result for everything operation during paging 3010 if (myParams.getEverythingMode() != null) { 3011 offset = 0; 3012 } 3013 3014 SearchQueryProperties properties = mySearchProperties.clone(); 3015 properties 3016 .setOffset(offset) 3017 .setMaxResultsRequested(theMaxResultsToFetch) 3018 .setDoCountOnlyFlag(false) 3019 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 3020 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 3021 } 3022 3023 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3024 3025 retrieveNextIteratorQuery(); 3026 3027 mySkipCount = 0; 3028 myNonSkipCount = 0; 3029 } 3030 3031 private void retrieveNextIteratorQuery() { 3032 close(); 3033 if (isNotEmpty(myQueryList)) { 3034 myResultsIterator = myQueryList.remove(0); 3035 myHasNextIteratorQuery = true; 3036 } else { 3037 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 3038 myHasNextIteratorQuery = false; 3039 } 3040 } 3041 3042 @Override 3043 public boolean hasNext() { 3044 if (myNext == null) { 3045 fetchNext(); 3046 } 3047 return !NO_MORE.equals(myNext); 3048 } 3049 3050 @Override 3051 public JpaPid next() { 3052 fetchNext(); 3053 JpaPid retVal = myNext; 3054 myNext = null; 3055 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 3056 return retVal; 3057 } 3058 3059 @Override 3060 public int getSkippedCount() { 3061 return mySkipCount; 3062 } 3063 3064 @Override 3065 public int getNonSkippedCount() { 3066 return myNonSkipCount; 3067 } 3068 3069 @Override 3070 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 3071 Collection<JpaPid> batch = new ArrayList<>(); 3072 while (this.hasNext() && batch.size() < theBatchSize) { 3073 batch.add(this.next()); 3074 } 3075 return batch; 3076 } 3077 3078 @Override 3079 public void close() { 3080 if (myResultsIterator != null) { 3081 myResultsIterator.close(); 3082 } 3083 myResultsIterator = null; 3084 } 3085 } 3086 3087 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 3088 // Only log at debug level since these messages aren't considered important enough 3089 // that we should be 
cluttering the system log, but they are important to the
		// specific query being executed, so we'll log them at INFO level there
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}

	private static ScrollableResults<?> toScrollableResults(Query theQuery) {
		org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
		return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
	}
}