
/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2025 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder;

import ca.uhn.fhir.context.ComboSearchParamType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dao.JpaPidFk;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CartesianProductUtil;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.ScrollableResultsIterator;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.svcs.ISearchLimiterSvc;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.SearchParameterUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import jakarta.persistence.criteria.Selection;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.stripStart;

/**
 * The SearchBuilder is responsible for actually forming the SQL query that handles
 * searches for resources
 */
public class SearchBuilder implements ISearchBuilder<JpaPid> {

	/**
	 * See loadResourcesByPid
	 * for an explanation of why we use the constant 800
	 */
	// NB: keep public
	@Deprecated
	public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;

	public static final String RESOURCE_ID_ALIAS = "resource_id";
	public static final String PARTITION_ID_ALIAS = "partition_id";
	public static final String RESOURCE_VERSION_ALIAS = "resource_version";
	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
	private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
	private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
	private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
	private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
	private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
	private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
	private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
	private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
	public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
	public static boolean myUseMaxPageSize50ForTest = false;
	public static Integer myMaxPageSizeForTests = null;
	protected final IInterceptorBroadcaster myInterceptorBroadcaster;
	protected final IResourceTagDao myResourceTagDao;
	private String myResourceName;
	private final Class<? extends IBaseResource> myResourceType;
	private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
	private final SqlObjectFactory mySqlBuilderFactory;
	private final HibernatePropertiesProvider myDialectProvider;
	private final ISearchParamRegistry mySearchParamRegistry;
	private final PartitionSettings myPartitionSettings;
	private final DaoRegistry myDaoRegistry;
	private final FhirContext myContext;
	private final IIdHelperService<JpaPid> myIdHelperService;
	private final JpaStorageSettings myStorageSettings;
	private final SearchQueryProperties mySearchProperties;
	private final IResourceHistoryTableDao myResourceHistoryTableDao;
	private final IJpaStorageResourceParser myJpaStorageResourceParser;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

	private CriteriaBuilder myCriteriaBuilder;
	private SearchParameterMap myParams;
	private String mySearchUuid;
	private int myFetchSize;

	private boolean myRequiresTotal;

	/**
	 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
	 */
	private Set<JpaPid> myPidSet;

	private boolean myHasNextIteratorQuery = false;
	private RequestPartitionId myRequestPartitionId;

	private IFulltextSearchSvc myFulltextSearchSvc;

	private final ISearchLimiterSvc mySearchLimiterSvc;

	@Autowired(required = false)
	public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) {
		myFulltextSearchSvc = theFulltextSearchSvc;
	}

	@Autowired(required = false)
	private IElasticsearchSvc myIElasticsearchSvc;

	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;

	@Autowired
	private IRequestPartitionHelperSvc myPartitionHelperSvc;

	/**
	 * Constructor
	 */
	@SuppressWarnings({"rawtypes", "unchecked"})
	public SearchBuilder(
			String theResourceName,
			JpaStorageSettings theStorageSettings,
			HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider,
			ISearchParamRegistry theSearchParamRegistry,
			PartitionSettings thePartitionSettings,
			IInterceptorBroadcaster theInterceptorBroadcaster,
			IResourceTagDao theResourceTagDao,
			DaoRegistry theDaoRegistry,
			FhirContext theContext,
			IIdHelperService theIdHelperService,
			IResourceHistoryTableDao theResourceHistoryTagDao,
			IJpaStorageResourceParser theIJpaStorageResourceParser,
			ISearchLimiterSvc theSearchLimiterSvc,
			Class<? extends IBaseResource> theResourceType) {
		myResourceName = theResourceName;
		myResourceType = theResourceType;
		myStorageSettings = theStorageSettings;
		mySearchLimiterSvc = theSearchLimiterSvc;

		myEntityManagerFactory = theEntityManagerFactory;
		mySqlBuilderFactory = theSqlBuilderFactory;
		myDialectProvider = theDialectProvider;
		mySearchParamRegistry = theSearchParamRegistry;
		myPartitionSettings = thePartitionSettings;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
		myResourceTagDao = theResourceTagDao;
		myDaoRegistry = theDaoRegistry;
		myContext = theContext;
		myIdHelperService = theIdHelperService;
		myResourceHistoryTableDao = theResourceHistoryTagDao;
		myJpaStorageResourceParser = theIJpaStorageResourceParser;

		mySearchProperties = new SearchQueryProperties();
	}

	@VisibleForTesting
	void setResourceName(String theName) {
		myResourceName = theName;
	}

	@Override
	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
		mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
	}

	@Override
	public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
		mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
	}

	@Override
	public void setRequireTotal(boolean theRequireTotal) {
		myRequiresTotal = theRequireTotal;
	}

	@Override
	public boolean requiresTotal() {
		return myRequiresTotal;
	}

	private void searchForIdsWithAndOr(
			SearchQueryBuilder theSearchSqlBuilder,
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest) {
		myParams = theParams;
		mySearchProperties.setSortSpec(myParams.getSort());

		// Remove any empty parameters
		theParams.clean();

		// For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance
		if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
			Dstu3DistanceHelper.setNearDistance(myResourceType, theParams);
		}

		// Attempt to lookup via composite unique key.
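		// (A matching combo unique SP lets the whole parameter combination be resolved against a single
		// composite unique index instead of joining one index table per parameter.)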
		if (isCompositeUniqueSpCandidate()) {
			attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
		}

		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream()
				.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
				.filter(t -> !t.equals(Constants.PARAM_TAG))
				.collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {
			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by
				// Elasticsearch
				continue;
			}
			List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
			Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
					.setParamName(nextParamName)
					.setAndOrParams(andOrParams)
					.setRequest(theRequest)
					.setRequestPartitionId(myRequestPartitionId)
					.setIncludeDeleted(myParams.getSearchIncludeDeletedMode()));
			if (predicate != null) {
				theSearchSqlBuilder.addPredicate(predicate);
			}
		}
	}

	/**
	 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the
	 * parameters all have no modifiers.
	 */
	private boolean isCompositeUniqueSpCandidate() {
		return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null;
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public Long createCountQuery(
			SearchParameterMap theParams,
			String theSearchUuid,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {

		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchUuid, theRequestPartitionId);

		if (checkUseHibernateSearch()) {
			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
		}

		SearchQueryProperties properties = mySearchProperties.clone();
		properties.setDoCountOnlyFlag(true);
		properties.setSortSpec(null); // counts don't require sorts
		properties.setMaxResultsRequested(null);
		properties.setOffset(null);
		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
		if (queries.isEmpty()) {
			return 0L;
		} else {
			JpaPid jpaPid = queries.get(0).next();
			return jpaPid.getId();
		}
	}

	/**
	 * @param thePidSet May be null
	 */
	@Override
	public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) {
		myPidSet = new HashSet<>(thePidSet);
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public IResultIterator<JpaPid> createQuery(
			SearchParameterMap theParams,
			SearchRuntimeDetails theSearchRuntimeDetails,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {
		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);

		if (myPidSet == null) {
			myPidSet = new HashSet<>();
		}

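		// myPidSet tracks PIDs already handed back so that follow-up iterator queries can exclude them
		// (see the excludeResourceIdsPredicate optimization further down in this class).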
		return new QueryIterator(theSearchRuntimeDetails, theRequest);
	}

	private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
		myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
		// we mutate the params. Make a private copy.
		myParams = theParams.clone();
		mySearchProperties.setSortSpec(myParams.getSort());
		mySearchUuid = theSearchUuid;
		myRequestPartitionId = theRequestPartitionId;
	}

	/**
	 * The query created can be either a count query or the
	 * actual query.
	 * This is why it takes a SearchQueryProperties object
	 * (and doesn't use the local version of it).
	 * The properties may differ slightly for whichever
	 * query this is.
	 */
	private List<ISearchQueryExecutor> createQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			SearchRuntimeDetails theSearchRuntimeDetails) {
		ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

		if (checkUseHibernateSearch()) {
			// we're going to run at least part of the search against the Fulltext service.

			// Ugh - we have two different return types for now
			ISearchQueryExecutor fulltextExecutor = null;
			List<JpaPid> fulltextMatchIds = null;
			int resultCount = 0;
			if (myParams.isLastN()) {
				fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested());
				resultCount = fulltextMatchIds.size();
			} else if (myParams.getEverythingMode() != null) {
				fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
				resultCount = fulltextMatchIds.size();
			} else {
				// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
				// enabled SP indexing).
				// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
				// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
				// extra work in Elastic.
				// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

				// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
				// a page.
				fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
			}

			if (fulltextExecutor == null) {
				fulltextExecutor =
						SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
			}

			if (theSearchRuntimeDetails != null) {
				theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
				IInterceptorBroadcaster compositeBroadcaster =
						CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
				if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
					HookParams params = new HookParams()
							.add(RequestDetails.class, theRequest)
							.addIfMatchesType(ServletRequestDetails.class, theRequest)
							.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
					compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
				}
			}

			// can we skip the database entirely and return the pid list from here?
			boolean canSkipDatabase =
					// if we processed an AND clause, and it returned nothing, then nothing can match.
					!fulltextExecutor.hasNext()
							||
							// Our hibernate search query doesn't respect partitions yet
							(!myPartitionSettings.isPartitioningEnabled()
									&&
									// were there AND terms left? Then we still need the db.
									theParams.isEmpty()
									&&
									// not every param is a param. :-(
									theParams.getNearDistanceParam() == null
									&&
									// todo MB don't we support _lastUpdated and _offset now?
									theParams.getLastUpdated() == null
									&& theParams.getEverythingMode() == null
									&& theParams.getOffset() == null);

			if (canSkipDatabase) {
				ourLog.trace("Query finished after HSearch. Skip db query phase");
				if (theSearchProperties.hasMaxResultsRequested()) {
					fulltextExecutor = SearchQueryExecutors.limited(
							fulltextExecutor, theSearchProperties.getMaxResultsRequested());
				}
				queries.add(fulltextExecutor);
			} else {
				ourLog.trace("Query needs db after HSearch. Chunking.");
				// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
				// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
				QueryChunker.chunk(
						fulltextExecutor,
						SearchBuilder.getMaximumPageSize(),
						// for each list of (SearchBuilder.getMaximumPageSize())
						// we create a chunked query and add it to 'queries'
						t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
			}
		} else {
			// do everything in the database.
			createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
		}

		return queries;
	}

	/**
	 * Check to see if query should use Hibernate Search, and error if the query can't continue.
	 *
	 * @return true if the query should first be processed by Hibernate Search
	 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text
	 */
	private boolean checkUseHibernateSearch() {
		boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled();

		if (!fulltextEnabled) {
			failIfUsed(Constants.PARAM_TEXT);
			failIfUsed(Constants.PARAM_CONTENT);
		} else {
			for (SortSpec sortSpec : myParams.getAllChainsInOrder()) {
				final String paramName = sortSpec.getParamName();
				if (paramName.contains(".")) {
					failIfUsedWithChainedSort(Constants.PARAM_TEXT);
					failIfUsedWithChainedSort(Constants.PARAM_CONTENT);
				}
			}
		}

		// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we
		// can.
		return fulltextEnabled
				&& myParams != null
				&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
				&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
				&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
	}

	private void failIfUsed(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(1192)
					+ "Fulltext search is not enabled on this service, can not process parameter: " + theParamName);
		}
	}

	private void failIfUsedWithChainedSort(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(2524)
					+ "Fulltext search combined with chained sorts are not supported, can not process parameter: "
					+ theParamName);
		}
	}

	private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) {
		// Can we use our hibernate search generated index on resource to support lastN?:
		if (myStorageSettings.isHibernateSearchIndexSearchParams()) {
			if (myFulltextSearchSvc == null) {
				throw new InvalidRequestException(Msg.code(2027)
						+ "LastN operation is not enabled on this service, can not process this request");
			}
			return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
					.map(t -> (JpaPid) t)
					.collect(Collectors.toList());
		} else {
			throw new InvalidRequestException(
					Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
		}
	}

	private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) {
		JpaPid pid = null;
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam =
					myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			if (idParam instanceof TokenParam idParm) {
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}

			pid = myIdHelperService
					.resolveResourceIdentity(
							myRequestPartitionId,
							myResourceName,
							idParamValue,
							ResolveIdentityMode.includeDeleted().cacheOk())
					.getPersistentId();
		}
		return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
	}

	private void doCreateChunkedQueries(
			SearchParameterMap theParams,
			List<JpaPid> thePids,
			SearchQueryProperties theSearchQueryProperties,
			RequestDetails theRequest,
			ArrayList<ISearchQueryExecutor> theQueries) {

		if (thePids.size() < getMaximumPageSize()) {
			thePids = normalizeIdListForInClause(thePids);
		}
		theSearchQueryProperties.setMaxResultsRequested(thePids.size());
		createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
	}

	/**
	 * Combs through the params for any _id parameters and extracts the PIDs for them
	 */
	private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
		// get all the IQueryParameterType objects
		// for _id -> these should all be StringParam values
		HashSet<IIdType> ids = new HashSet<>();
		List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
		for (List<IQueryParameterType> paramList : params) {
			for (IQueryParameterType param : paramList) {
				String id;
				if (param instanceof StringParam) {
					// we expect all _id values to be StringParams
					id = ((StringParam) param).getValue();
				} else if (param instanceof TokenParam) {
					id = ((TokenParam) param).getValue();
				} else {
					// we do not expect the _id parameter to be a non-string value
					throw new IllegalArgumentException(
							Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
				}

				IIdType idType = myContext.getVersion().newIdType();
				if (id.contains("/")) {
					idType.setValue(id);
				} else {
					idType.setValue(myResourceName + "/" + id);
				}
				ids.add(idType);
			}
		}

		// fetch our target Pids
		// this will throw if an id is not found
		Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
				myRequestPartitionId,
				new ArrayList<>(ids),
				ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

		// add the pids to targetPids
		for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
			theTargetPids.add(pid.getPersistentId());
		}
	}

	private void createChunkedQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		if (myParams.getEverythingMode() != null) {
			createChunkedQueryForEverythingSearch(
					theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
		} else {
			createChunkedQueryNormalSearch(
					theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
		}
	}

	private void createChunkedQueryNormalSearch(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				myResourceName,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchProperties.isDoCountOnlyFlag(),
				myResourceName == null || myResourceName.isBlank());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		if (theParams.keySet().size() > 1
				|| theParams.getSort() != null
				|| theParams.keySet().contains(Constants.PARAM_HAS)
				|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);
			}
		}

		/*
		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
		 * specific filters with ORs as their root from working around the natural resource type / deletion
		 * status / partition IDs built into queries.
		 */
		if (theParams.containsKey(Constants.PARAM_FILTER)) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Normal search
		// we will create a resourceTablePredicate if and only if we have an _id SP.
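		// Each remaining search parameter is translated into SQL joins/predicates by the QueryStack inside
		// searchForIdsWithAndOr(), and the resulting predicates are ANDed onto the builder.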
		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
		// partition ID predicate in that case.
		if (!sqlBuilder.haveAtLeastOnePredicate()) {
			Condition partitionIdPredicate;

			if (theParams.getSearchIncludeDeletedMode() != null) {
				partitionIdPredicate = sqlBuilder
						.getOrCreateResourceTablePredicateBuilder(true, theParams.getSearchIncludeDeletedMode())
						.createPartitionIdPredicate(myRequestPartitionId);
			} else {
				partitionIdPredicate = sqlBuilder
						.getOrCreateResourceTablePredicateBuilder()
						.createPartitionIdPredicate(myRequestPartitionId);
			}

			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		// Last updated
		addLastUpdatePredicate(sqlBuilder);

		/*
		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
		 * to something needed to guarantee correct results.
		 *
		 * Why do we need it? Suppose for example, a query like:
		 *   Observation?category=foo,bar,baz
		 * And suppose you have many resources that have all 3 of these category codes. In this case
		 * the SQL query will probably return the same PIDs multiple times, and if this happens enough
		 * we may exhaust the query results without getting enough distinct results back. When that
		 * happens we re-run the query with a larger limit. Excluding results we already know about
		 * tries to ensure that we get new unique results.
		 *
		 * The challenge with that though is that lots of DBs have an issue with too many
		 * parameters in one query. So we only do this optimization if there aren't too
		 * many results.
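		 * (The 900 cut-off used below is presumably chosen to stay comfortably under the roughly
		 * 1000-bind-parameter limit that several databases impose on a single statement.)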
		 */
		if (myHasNextIteratorQuery) {
			if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) {
				sqlBuilder.excludeResourceIdsPredicate(myPidSet);
			}
		}

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * OR
		 * if the MaxResultsToFetch is null, we are requesting "everything",
		 * so we'll let the db do the deduplication (instead of in-memory)
		 */
		if (theSearchProperties.isDeduplicateInDatabase()) {
			queryStack3.addGrouping();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Sort
		 *
		 * If we have a sort, we wrap the criteria search (the search that actually
		 * finds the appropriate resources) in an outer search which is then sorted
		 */
		if (theSearchProperties.hasSort()) {
			assert !theSearchProperties.isDoCountOnlyFlag();

			createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void executeSearch(
			SearchQueryProperties theProperties,
			List<ISearchQueryExecutor> theSearchQueryExecutors,
			SearchQueryBuilder sqlBuilder) {
		GeneratedSql generatedSql =
				sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
		if (!generatedSql.isMatchNothing()) {
			SearchQueryExecutor executor =
					mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
			theSearchQueryExecutors.add(executor);
		}
	}

	private void createChunkedQueryForEverythingSearch(
			RequestDetails theRequest,
			SearchParameterMap theParams,
			SearchQueryProperties theSearchQueryProperties,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {

		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				null,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchQueryProperties.isDoCountOnlyFlag(),
				false);

		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

		Set<JpaPid> targetPids = new HashSet<>();
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {

			extractTargetPidsFromIdParams(targetPids);

			// add the target pids to our executors as the first
			// results iterator to go through
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
		} else {
			// For Everything queries, we make the query root by the ResourceLink table, since this query
			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
			// the one problem with this approach is that it doesn't catch Patients that have absolutely
			// nothing linked to them. So we do one additional query to make sure we catch those too.
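			// Note that no search predicates are added to this extra builder, so it selects every PID of the
			// root resource type; that is how resources with nothing linked to them still make it into the results.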
			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
					myContext,
					myStorageSettings,
					myPartitionSettings,
					myRequestPartitionId,
					myResourceName,
					mySqlBuilderFactory,
					myDialectProvider,
					theSearchQueryProperties.isDoCountOnlyFlag(),
					false);
			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
					theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
			String sql = allTargetsSql.getSql();
			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

			List<JpaPid> output =
					jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args);

			// we add a search executor to fetch unlinked patients first
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
		}

		List<String> typeSourceResources = new ArrayList<>();
		if (myParams.get(Constants.PARAM_TYPE) != null) {
			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
		}

		queryStack3.addPredicateEverythingOperation(
				myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * ORDER BY is required to make sure we return unique results for each page
		 */
		if (theSearchQueryProperties.hasOffset()) {
			queryStack3.addGrouping();
			queryStack3.addOrdering();
			queryStack3.setUseAggregate(true);
		}

		if (myParams.getEverythingMode().isPatient()) {
			Collection<String> resourcesToOmit =
					mySearchLimiterSvc.getResourcesToOmitForOperationSearches(JpaConstants.OPERATION_EVERYTHING);
			sqlBuilder.excludeResourceTypesPredicate(resourcesToOmit);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
		if (thePidList != null && !thePidList.isEmpty()) {
			theSqlBuilder.addResourceIdsPredicate(thePidList);
		}
	}

	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
		DateRangeParam lu = myParams.getLastUpdated();
		if (lu != null && !lu.isEmpty()) {
			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
			theSqlBuilder.addPredicate(lastUpdatedPredicates);
		}
	}

	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
		jdbcTemplate.setFetchSize(myFetchSize);
		if (theMaximumResults != null) {
			jdbcTemplate.setMaxRows(theMaximumResults);
		}
		return jdbcTemplate;
	}

	private Collection<String> extractTypeSourceResourcesFromParams() {

		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);

		// first off, let's flatten the list of list
		List<IQueryParameterType> iQueryParameterTypesList =
				listOfList.stream().flatMap(List::stream).collect(Collectors.toList());

		// then, extract all elements of each CSV into one big list
		List<String> resourceTypes = iQueryParameterTypesList.stream()
				.map(param -> ((StringParam) param).getValue())
				.map(csvString -> List.of(csvString.split(",")))
				.flatMap(List::stream)
				.collect(Collectors.toList());

		Set<String> knownResourceTypes = myContext.getResourceTypes();

		// remove leading/trailing whitespaces if any and remove duplicates
		Set<String> retVal = new HashSet<>();

		for (String type : resourceTypes) {
			String trimmed = type.trim();
			if (!knownResourceTypes.contains(trimmed)) {
				throw new ResourceNotFoundException(
						Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter.");
			}
			retVal.add(trimmed);
		}

		return retVal;
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myStorageSettings.isIndexOnContainedResources()
				&& theParams.values().stream()
						.flatMap(Collection::stream)
						.flatMap(Collection::stream)
						.anyMatch(ReferenceParam.class::isInstance);
	}

	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
		if (theSort == null || isBlank(theSort.getParamName())) {
			return;
		}

		boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC);

		if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourceId(ascending);

		} else if (Constants.PARAM_PID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourcePID(ascending);

		} else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {

			theQueryStack.addSortOnLastUpdated(ascending);

		} else {
			RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

			/*
			 * If we have a sort like _sort=subject.name and we have an
			 * uplifted refchain for that combination we can do it more efficiently
			 * by using the index associated with the uplifted refchain. In this case,
			 * we need to find the actual target search parameter (corresponding
			 * to "name" in this example) so that we know what datatype it is.
			 */
			String paramName = theSort.getParamName();
			if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
				String[] chains = StringUtils.split(paramName, '.');
				if (chains.length == 2) {

					// Given: Encounter?_sort=Patient:subject.name
					String referenceParam = chains[0]; // subject
					String referenceParamTargetType = null; // Patient
					String targetParam = chains[1]; // name

					int colonIdx = referenceParam.indexOf(':');
					if (colonIdx > -1) {
						referenceParamTargetType = referenceParam.substring(0, colonIdx);
						referenceParam = referenceParam.substring(colonIdx + 1);
					}
					RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
							myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
					if (outerParam == null) {
						throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
					} else if (outerParam.hasUpliftRefchain(targetParam)) {
						for (String nextTargetType : outerParam.getTargets()) {
							if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
								continue;
							}
							RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
									nextTargetType,
									targetParam,
									ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
							if (innerParam != null) {
								param = innerParam;
								break;
							}
						}
					}
				}
			}

			int colonIdx = paramName.indexOf(':');
			String referenceTargetType = null;
			if (colonIdx > -1) {
				referenceTargetType = paramName.substring(0, colonIdx);
				paramName = paramName.substring(colonIdx + 1);
			}

			int dotIdx = paramName.indexOf('.');
			String chainName = null;
			if (param == null && dotIdx > -1) {
				chainName = paramName.substring(dotIdx + 1);
				paramName = paramName.substring(0, dotIdx);
				if (chainName.contains(".")) {
					String msg = myContext
							.getLocalizer()
							.getMessageSanitized(
									BaseStorageDao.class,
									"invalidSortParameterTooManyChains",
									paramName + "." + chainName);
					throw new InvalidRequestException(Msg.code(2286) + msg);
				}
			}

			if (param == null) {
				param = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
			}

			if (param == null) {
				throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
			}

			// param will never be null here (the above line throws if it does)
			// this is just to prevent the warning
			assert param != null;
			if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
				throw new InvalidRequestException(
						Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
			}

			switch (param.getParamType()) {
				case STRING:
					theQueryStack.addSortOnString(myResourceName, paramName, ascending);
					break;
				case DATE:
					theQueryStack.addSortOnDate(myResourceName, paramName, ascending);
					break;
				case REFERENCE:
					theQueryStack.addSortOnResourceLink(
							myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
					break;
				case TOKEN:
					theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
					break;
				case NUMBER:
					theQueryStack.addSortOnNumber(myResourceName, paramName, ascending);
					break;
				case URI:
					theQueryStack.addSortOnUri(myResourceName, paramName, ascending);
					break;
				case QUANTITY:
					theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending);
					break;
				case COMPOSITE:
					List<RuntimeSearchParam> compositeList =
							JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param);
					if (compositeList == null) {
						throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName
								+ " is not defined by the resource " + myResourceName);
					}
					if (compositeList.size() != 2) {
						throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName
								+ " must have 2 composite types declared in parameter annotation, found "
								+ compositeList.size());
					}
					RuntimeSearchParam left = compositeList.get(0);
					RuntimeSearchParam right = compositeList.get(1);

					createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending);
					createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending);

					break;
				case SPECIAL:
					if (LOCATION_POSITION.equals(param.getPath())) {
						theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams);
						break;
					}
					throw new InvalidRequestException(
							Msg.code(2306) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);

				case HAS:
				default:
					throw new InvalidRequestException(
							Msg.code(1197) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);
			}
		}

		// Recurse
		createSort(theQueryStack, theSort.getChain(), theParams);
	}

	private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
		Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
				theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
		String msg = myContext
				.getLocalizer()
				.getMessageSanitized(
						BaseStorageDao.class,
						"invalidSortParameter",
						theParamName,
						theResourceName,
						validSearchParameterNames);
		throw new InvalidRequestException(Msg.code(1194) + msg);
	}

	private void createCompositeSort(
			QueryStack theQueryStack,
			RestSearchParameterTypeEnum theParamType,
			String theParamName,
			boolean theAscending) {

		switch (theParamType) {
			case STRING:
				theQueryStack.addSortOnString(myResourceName, theParamName, theAscending);
				break;
			case DATE:
				theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending);
				break;
			case TOKEN:
				theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending);
				break;
			case QUANTITY:
				theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending);
				break;
			case NUMBER:
			case REFERENCE:
			case COMPOSITE:
			case URI:
			case HAS:
			case SPECIAL:
			default:
				throw new InvalidRequestException(
						Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType
								+ " on _sort=" + theParamName);
		}
	}

	private void doLoadPids(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			Map<Long, Integer> thePosition) {
		Map<JpaPid, Long> resourcePidToVersion = null;
		for (JpaPid next : thePids) {
			if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
				if (resourcePidToVersion == null) {
					resourcePidToVersion = new HashMap<>();
				}
				resourcePidToVersion.put(next, next.getVersion());
			}
		}

		List<JpaPid> versionlessPids = new ArrayList<>(thePids);
		if (versionlessPids.size() < getMaximumPageSize()) {
			versionlessPids = normalizeIdListForInClause(versionlessPids);
		}

		// Load the resource bodies
		List<ResourceHistoryTable> resourceSearchViewList =
				myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(
						JpaPidFk.fromPids(versionlessPids));

		/*
		 * If we have specific versions to load, replace the history entries with the
		 * correct ones
		 *
		 * TODO: this could definitely be made more efficient, probably by not loading the wrong
		 * version entity first, and by batching the fetches. But this is a fairly infrequently
		 * used feature, and loading history entities by PK is a very efficient query so it's
		 * not the end of the world
		 */
		if (resourcePidToVersion != null) {
			for (int i = 0; i < resourceSearchViewList.size(); i++) {
				ResourceHistoryTable next = resourceSearchViewList.get(i);
				JpaPid resourceId = next.getPersistentId();
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
				if (version != null && !version.equals(next.getVersion())) {
					ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion(
							next.getResourceId().toFk(), version);
					resourceSearchViewList.set(i, replacement);
				}
			}
		}

		// -- preload all tags with tag definition if any
		Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);

		for (ResourceHistoryTable next : resourceSearchViewList) {
			if (next.getDeleted() != null) {
				continue;
			}

			Class<? extends IBaseResource> resourceType =
					myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();

			JpaPid resourceId = next.getPersistentId();

			if (resourcePidToVersion != null) {
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
			}

			IBaseResource resource;
			resource = myJpaStorageResourceParser.toResource(
					resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation);
			if (resource == null) {
				ourLog.warn(
						"Unable to find resource {}/{}/_history/{} in database",
						next.getResourceType(),
						next.getIdDt().getIdPart(),
						next.getVersion());
				continue;
			}

			Integer index = thePosition.get(resourceId.getId());
			if (index == null) {
				ourLog.warn("Got back unexpected resource PID {}", resourceId);
				continue;
			}

			if (theIncludedPids.contains(resourceId)) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH);
			}

			// ensure there's enough space; "<=" because of 0-indexing
			while (theResourceListToPopulate.size() <= index) {
				theResourceListToPopulate.add(null);
			}
			theResourceListToPopulate.set(index, resource);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
		return switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED -> getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables);
			case INLINE -> Map.of();
		};
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		int index = 0;
		for (JpaPid next : thePids) {
			position.put(next.getId(), index++);
		}

		// Can we fast track this loading by checking elastic search?
		boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids);
		if (isUsingElasticSearch) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in index, so we will inform this and resort to JPA search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some jdbc drivers can't handle long param lists.
		QueryChunker.chunk(thePids, t -> {
			doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position);
		});
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isHibernateSearchIndexSearchParams()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES
		// only impl
		// to handle lastN?
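		// When the resource bodies are stored in the Hibernate Search index (both settings checked below),
		// they can be rehydrated directly from the index without a database round trip.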
1489 if (myStorageSettings.isHibernateSearchIndexSearchParams() 1490 && myStorageSettings.isStoreResourceInHSearchIndex()) { 1491 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1492 1493 return myFulltextSearchSvc.getResources(pidList); 1494 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1495 // legacy LastN implementation 1496 return myIElasticsearchSvc.getObservationResources(thePids); 1497 } else { 1498 return Collections.emptyList(); 1499 } 1500 } 1501 1502 /** 1503 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1504 * so it can't be Collections.emptySet() or some such thing. 1505 * The JpaPid returned will have resource type populated. 1506 */ 1507 @Override 1508 public Set<JpaPid> loadIncludes( 1509 FhirContext theContext, 1510 EntityManager theEntityManager, 1511 Collection<JpaPid> theMatches, 1512 Collection<Include> theIncludes, 1513 boolean theReverseMode, 1514 DateRangeParam theLastUpdated, 1515 String theSearchIdOrDescription, 1516 RequestDetails theRequest, 1517 Integer theMaxCount) { 1518 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1519 parameters.setFhirContext(theContext); 1520 parameters.setEntityManager(theEntityManager); 1521 parameters.setMatches(theMatches); 1522 parameters.setIncludeFilters(theIncludes); 1523 parameters.setReverseMode(theReverseMode); 1524 parameters.setLastUpdated(theLastUpdated); 1525 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1526 parameters.setRequestDetails(theRequest); 1527 parameters.setMaxCount(theMaxCount); 1528 return loadIncludes(parameters); 1529 } 1530 1531 @Override 1532 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1533 Collection<JpaPid> matches = theParameters.getMatches(); 1534 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1535 boolean reverseMode = theParameters.isReverseMode(); 1536 EntityManager entityManager = theParameters.getEntityManager(); 1537 Integer maxCount = theParameters.getMaxCount(); 1538 FhirContext fhirContext = theParameters.getFhirContext(); 1539 RequestDetails request = theParameters.getRequestDetails(); 1540 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1541 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1542 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1543 IInterceptorBroadcaster compositeBroadcaster = 1544 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1545 1546 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1547 CurrentThreadCaptureQueriesListener.startCapturing(); 1548 } 1549 if (matches.isEmpty()) { 1550 return new HashSet<>(); 1551 } 1552 if (currentIncludes == null || currentIncludes.isEmpty()) { 1553 return new HashSet<>(); 1554 } 1555 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1556 String searchPartitionIdFieldName = 1557 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1558 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1559 String findPartitionIdFieldName = 1560 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1561 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1562 String findVersionFieldName = null; 1563 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1564 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1565 } 1566 1567 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1568 HashSet<JpaPid> allAdded = new HashSet<>(); 1569 HashSet<JpaPid> original = new HashSet<>(matches); 1570 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1571 1572 int roundCounts = 0; 1573 StopWatch w = new StopWatch(); 1574 1575 boolean addedSomeThisRound; 1576 do { 1577 roundCounts++; 1578 1579 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1580 1581 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1582 Include nextInclude = iter.next(); 1583 if (!nextInclude.isRecurse()) { 1584 iter.remove(); 1585 } 1586 1587 // Account for _include=* 1588 boolean matchAll = "*".equals(nextInclude.getValue()); 1589 1590 // Account for _include=[resourceType]:* 1591 String wantResourceType = null; 1592 if (!matchAll) { 1593 if ("*".equals(nextInclude.getParamName())) { 1594 wantResourceType = nextInclude.getParamType(); 1595 matchAll = true; 1596 } 1597 } 1598 1599 if (matchAll) { 1600 loadIncludesMatchAll( 1601 findPidFieldName, 1602 findPartitionIdFieldName, 1603 findResourceTypeFieldName, 1604 findVersionFieldName, 1605 searchPidFieldName, 1606 searchPartitionIdFieldName, 1607 wantResourceType, 1608 reverseMode, 1609 hasDesiredResourceTypes, 1610 nextRoundMatches, 1611 entityManager, 1612 maxCount, 1613 desiredResourceTypes, 1614 pidsToInclude, 1615 request); 1616 } else { 1617 loadIncludesMatchSpecific( 1618 nextInclude, 1619 fhirContext, 1620 findPidFieldName, 1621 findPartitionIdFieldName, 1622 findVersionFieldName, 1623 searchPidFieldName, 1624 reverseMode, 1625 nextRoundMatches, 1626 entityManager, 1627 maxCount, 1628 pidsToInclude, 1629 request); 1630 } 1631 } 1632 1633 nextRoundMatches.clear(); 1634 for (JpaPid next : pidsToInclude) { 1635 if (!original.contains(next) && !allAdded.contains(next)) { 1636 nextRoundMatches.add(next); 1637 } else { 1638 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1639 } 1640 } 1641 1642 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1643 1644 if (maxCount != null && allAdded.size() >= maxCount) { 1645 break; 1646 } 1647 1648 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1649 1650 allAdded.removeAll(original); 1651 1652 ourLog.info( 1653 "Loaded {} {} in {} rounds and {} ms for search {}", 1654 allAdded.size(), 1655 reverseMode ? 
"_revincludes" : "_includes", 1656 roundCounts, 1657 w.getMillisAndRestart(), 1658 searchIdOrDescription); 1659 1660 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1661 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1662 } 1663 1664 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1665 // This can be used to remove results from the search result details before 1666 // the user has a chance to know that they were in the results 1667 if (!allAdded.isEmpty()) { 1668 1669 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1670 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1671 JpaPreResourceAccessDetails accessDetails = 1672 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1673 HookParams params = new HookParams() 1674 .add(IPreResourceAccessDetails.class, accessDetails) 1675 .add(RequestDetails.class, request) 1676 .addIfMatchesType(ServletRequestDetails.class, request); 1677 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1678 1679 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1680 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1681 JpaPid value = includedPidList.remove(i); 1682 if (value != null) { 1683 allAdded.remove(value); 1684 } 1685 } 1686 } 1687 } 1688 } 1689 1690 return allAdded; 1691 } 1692 1693 private void loadIncludesMatchSpecific( 1694 Include nextInclude, 1695 FhirContext fhirContext, 1696 String findPidFieldName, 1697 String findPartitionFieldName, 1698 String findVersionFieldName, 1699 String searchPidFieldName, 1700 boolean reverseMode, 1701 List<JpaPid> nextRoundMatches, 1702 EntityManager entityManager, 1703 Integer maxCount, 1704 HashSet<JpaPid> pidsToInclude, 1705 RequestDetails theRequest) { 1706 List<String> paths; 1707 1708 // Start replace 1709 RuntimeSearchParam param; 1710 String resType = nextInclude.getParamType(); 1711 if (isBlank(resType)) { 1712 return; 1713 } 1714 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1715 if (def == null) { 1716 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1717 return; 1718 } 1719 1720 String paramName = nextInclude.getParamName(); 1721 if (isNotBlank(paramName)) { 1722 param = mySearchParamRegistry.getActiveSearchParam( 1723 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1724 } else { 1725 param = null; 1726 } 1727 if (param == null) { 1728 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1729 return; 1730 } 1731 1732 paths = param.getPathsSplitForResourceType(resType); 1733 // end replace 1734 1735 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1736 1737 for (String nextPath : paths) { 1738 String findPidFieldSqlColumn = 1739 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1740 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1741 if (findVersionFieldName != null) { 1742 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1743 } 1744 if (myPartitionSettings.isDatabasePartitionMode()) { 1745 fieldsToLoad += ", r."; 1746 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1747 ? 
"partition_id" 1748 : "target_res_partition_id"; 1749 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1750 } 1751 1752 // Query for includes lookup has 2 cases 1753 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1754 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1755 // url in target_resource_url 1756 1757 // Case 1: 1758 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1759 1760 String searchPidFieldSqlColumn = 1761 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1762 StringBuilder localReferenceQuery = new StringBuilder(); 1763 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1764 localReferenceQuery.append(" FROM hfj_res_link r "); 1765 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1766 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1767 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1768 } 1769 localReferenceQuery 1770 .append(" AND r.") 1771 .append(searchPidFieldSqlColumn) 1772 .append(" IN (:target_pids) "); 1773 if (myPartitionSettings.isDatabasePartitionMode()) { 1774 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1775 ? "target_res_partition_id" 1776 : "partition_id"; 1777 localReferenceQuery 1778 .append("AND r.") 1779 .append(partitionFieldToSearch) 1780 .append(" = :search_partition_id "); 1781 } 1782 localReferenceQueryParams.put("src_path", nextPath); 1783 // we loop over target_pids later. 1784 if (targetResourceTypes != null) { 1785 if (targetResourceTypes.size() == 1) { 1786 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1787 localReferenceQueryParams.put( 1788 "target_resource_type", 1789 targetResourceTypes.iterator().next()); 1790 } else { 1791 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1792 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1793 } 1794 } 1795 1796 // Case 2: 1797 Pair<String, Map<String, Object>> canonicalQuery = 1798 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1799 1800 String sql = localReferenceQuery.toString(); 1801 if (canonicalQuery != null) { 1802 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1803 } 1804 1805 Map<String, Object> limitParams = new HashMap<>(); 1806 if (maxCount != null) { 1807 LinkedList<Object> bindVariables = new LinkedList<>(); 1808 sql = SearchQueryBuilder.applyLimitToSql( 1809 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1810 1811 // The dialect SQL limiter uses positional params, but we're using 1812 // named params here, so we need to replace the positional params 1813 // with equivalent named ones 1814 StringBuilder sb = new StringBuilder(); 1815 for (int i = 0; i < sql.length(); i++) { 1816 char nextChar = sql.charAt(i); 1817 if (nextChar == '?') { 1818 String nextName = "limit" + i; 1819 sb.append(':').append(nextName); 1820 limitParams.put(nextName, bindVariables.removeFirst()); 1821 } else { 1822 sb.append(nextChar); 1823 } 1824 } 1825 sql = sb.toString(); 1826 } 1827 1828 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1829 for (Collection<JpaPid> nextPartition : partitions) { 1830 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1831 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1832 if (myPartitionSettings.isDatabasePartitionMode()) { 1833 q.setParameter( 1834 "search_partition_id", 1835 nextPartition.iterator().next().getPartitionId()); 1836 } 1837 localReferenceQueryParams.forEach(q::setParameter); 1838 if (canonicalQuery != null) { 1839 canonicalQuery.getRight().forEach(q::setParameter); 1840 } 1841 limitParams.forEach(q::setParameter); 1842 1843 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1844 Tuple result; 1845 while (iter.hasNext()) { 1846 result = iter.next(); 1847 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1848 Long resourceVersion = null; 1849 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1850 resourceVersion = 1851 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1852 } 1853 Integer partitionId = null; 1854 if (myPartitionSettings.isDatabasePartitionMode()) { 1855 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1856 } 1857 1858 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1859 pid.setPartitionId(partitionId); 1860 pidsToInclude.add(pid); 1861 } 1862 } 1863 // myEntityManager.clear(); 1864 } 1865 } 1866 } 1867 1868 private void loadIncludesMatchAll( 1869 String findPidFieldName, 1870 String findPartitionFieldName, 1871 String findResourceTypeFieldName, 1872 String findVersionFieldName, 1873 String searchPidFieldName, 1874 String searchPartitionFieldName, 1875 String wantResourceType, 1876 boolean reverseMode, 1877 boolean hasDesiredResourceTypes, 1878 List<JpaPid> nextRoundMatches, 1879 EntityManager entityManager, 1880 Integer maxCount, 1881 List<String> desiredResourceTypes, 1882 HashSet<JpaPid> pidsToInclude, 1883 RequestDetails request) { 1884 1885 record IncludesRecord( 1886 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1887 1888 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1889 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1890 Root<ResourceLink> root = query.from(ResourceLink.class); 1891 1892 List<Selection<?>> selectionList = new ArrayList<>(); 1893 selectionList.add(root.get(findPidFieldName)); 1894 selectionList.add(root.get(findResourceTypeFieldName)); 1895 selectionList.add(root.get("myTargetResourceUrl")); 1896 if (findVersionFieldName != null) { 1897 selectionList.add(root.get(findVersionFieldName)); 1898 } else { 1899 selectionList.add(cb.nullLiteral(Long.class)); 1900 } 1901 if (myPartitionSettings.isDatabasePartitionMode()) { 1902 selectionList.add(root.get(findPartitionFieldName)); 1903 } else { 1904 selectionList.add(cb.nullLiteral(Integer.class)); 1905 } 1906 query.multiselect(selectionList); 1907 1908 List<Predicate> predicates = new ArrayList<>(); 1909 1910 if (myPartitionSettings.isDatabasePartitionMode()) { 1911 predicates.add( 1912 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 1913 } 1914 1915 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 1916 1917 /* 1918 * We need to set the resource type in 2 cases only: 1919 * 1) we are in $everything mode 1920 * (where we only want to fetch specific resource types, regardless of what is 1921 * available to fetch) 1922 * 2) we are doing revincludes 1923 * 1924 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 1925 * should always be checking the source resource type on the resource link. We don't 1926 * actually index that column though by default, so in order to try and be efficient 1927 * we don't actually include it for includes (but we do for revincludes). This is 1928 * because for an include, it doesn't really make sense to include a different 1929 * resource type than the one you are searching on. 1930 */ 1931 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1932 // because mySourceResourceType is not part of the HFJ_RES_LINK 1933 // index, this might not be the most optimal performance. 1934 // but it is for an $everything operation (and maybe we should update the index) 1935 predicates.add( 1936 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 1937 } else { 1938 wantResourceType = null; 1939 } 1940 1941 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1942 // resources 1943 // (e.g. via Provenance, List, or Group) when in an $everything operation 1944 if (myParams != null 1945 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1946 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 1947 predicates.add(cb.not(root.get("mySourceResourceType") 1948 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 1949 } 1950 1951 if (hasDesiredResourceTypes) { 1952 predicates.add( 1953 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 1954 } 1955 1956 query.where(cb.and(predicates.toArray(new Predicate[0]))); 1957 1958 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1959 for (Collection<JpaPid> nextPartition : partitions) { 1960 1961 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 1962 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1963 if (myPartitionSettings.isDatabasePartitionMode()) { 1964 q.setParameter( 1965 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1966 } 1967 if (wantResourceType != null) { 1968 q.setParameter("want_resource_type", wantResourceType); 1969 } 1970 if (maxCount != null) { 1971 q.setMaxResults(maxCount); 1972 } 1973 if (hasDesiredResourceTypes) { 1974 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1975 } 1976 1977 Set<String> canonicalUrls = null; 1978 1979 try (ScrollableResultsIterator<IncludesRecord> iter = 1980 new ScrollableResultsIterator<>(toScrollableResults(q))) { 1981 IncludesRecord nextRow; 1982 while (iter.hasNext()) { 1983 nextRow = iter.next(); 1984 if (nextRow == null) { 1985 // This can happen if there are outgoing references which are canonical or point to 1986 // other servers 1987 continue; 1988 } 1989 1990 Long version = nextRow.version; 1991 Long resourceId = nextRow.resourceId; 1992 String resourceType = nextRow.resourceType; 1993 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 1994 Integer partitionId = nextRow.partitionId; 1995 1996 if (resourceId != null) { 1997 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1998 pid.setPartitionId(partitionId); 1999 pidsToInclude.add(pid); 2000 } else if (resourceCanonicalUrl != null) { 2001 if (canonicalUrls == null) { 2002 canonicalUrls = new HashSet<>(); 2003 } 2004 canonicalUrls.add(resourceCanonicalUrl); 2005 } 
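					// (Note: rows that carry neither a local target pid nor a canonical target URL
					// contribute nothing to this round; they were either skipped above or simply have
					// no includable target.)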
				}
			}

			if (canonicalUrls != null) {
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		if (canonicalUrlTargets.isEmpty()) {
			return;
		}

		String message =
				"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
		firePerformanceWarning(theRequestDetails, message);

		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance tracing hook, sending the raw SQL queries captured on the current
	 * thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = nextInclude.getParamTargetType();
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
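			// (Note: the null returned here is interpreted by the callers above as "no target type
			// restriction", so no target_resource_type predicate gets added to the include query.)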
2098 targetResourceTypes = null; 2099 } 2100 return targetResourceTypes; 2101 } 2102 2103 @Nullable 2104 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2105 String theVersionFieldName, 2106 Set<String> theTargetResourceTypes, 2107 boolean theReverse, 2108 RequestDetails theRequest, 2109 RuntimeSearchParam theParam) { 2110 2111 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2112 2113 // If we know for sure that none of the paths involved in this SearchParameter could 2114 // be indexing a canonical 2115 if (Arrays.stream(searchParameterPaths) 2116 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2117 myContext, myResourceName, t, theReverse))) { 2118 return null; 2119 } 2120 2121 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2122 if (theVersionFieldName != null) { 2123 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2124 fieldsToLoadFromSpidxUriTable += ", NULL"; 2125 } 2126 2127 if (myPartitionSettings.isDatabasePartitionMode()) { 2128 if (theReverse) { 2129 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2130 } else { 2131 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2132 } 2133 } 2134 2135 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2136 // But sp_name isn't indexed, so we use hash_identity instead. 2137 CanonicalUrlTargets canonicalUrlTargets = 2138 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2139 if (canonicalUrlTargets.isEmpty()) { 2140 return null; 2141 } 2142 2143 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2144 StringBuilder canonicalUrlQuery = new StringBuilder(); 2145 canonicalUrlQuery 2146 .append("SELECT ") 2147 .append(fieldsToLoadFromSpidxUriTable) 2148 .append(' '); 2149 canonicalUrlQuery.append("FROM hfj_res_link r "); 2150 2151 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2152 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2153 if (myPartitionSettings.isDatabasePartitionMode()) { 2154 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2155 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2156 } 2157 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2158 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2159 canonicalUriQueryParams.put( 2160 "uri_identity_hash", 2161 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2162 } else { 2163 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2164 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2165 } 2166 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2167 canonicalUrlQuery.append(")"); 2168 2169 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2170 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2171 canonicalUrlQuery.append(" AND"); 2172 if (myPartitionSettings.isDatabasePartitionMode()) { 2173 if (theReverse) { 2174 canonicalUrlQuery.append(" rUri.partition_id"); 2175 } else { 2176 canonicalUrlQuery.append(" r.partition_id"); 2177 } 2178 canonicalUrlQuery.append(" = :search_partition_id"); 2179 canonicalUrlQuery.append(" AND"); 2180 } 2181 if (theReverse) { 2182 
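			// (Clarifying note: in reverse mode the pids being expanded are the canonical targets,
			// so the IN (:target_pids) restriction below applies to the resource that owns the
			// indexed uri (rUri.res_id) rather than to the link source.)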
canonicalUrlQuery.append(" rUri.res_id"); 2183 } else { 2184 canonicalUrlQuery.append(" r.src_resource_id"); 2185 } 2186 canonicalUrlQuery.append(" IN (:target_pids)"); 2187 2188 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2189 } 2190 2191 @Nonnull 2192 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2193 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2194 Set<String> targetResourceTypes = theTargetResourceTypes; 2195 if (targetResourceTypes == null) { 2196 /* 2197 * If we don't have a list of valid target types, we need to figure out a list of all 2198 * possible target types in order to perform the search of the URI index table. This is 2199 * because the hash_identity column encodes the resource type, so we'll need a hash 2200 * value for each possible target type. 2201 */ 2202 targetResourceTypes = new HashSet<>(); 2203 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2204 if (theReverse) { 2205 // For reverse includes, it is really hard to figure out what types 2206 // are actually potentially pointing to the type we're searching for 2207 // in this context, so let's just assume it could be anything. 2208 targetResourceTypes = possibleTypes; 2209 } else { 2210 List<RuntimeSearchParam> params = mySearchParamRegistry 2211 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2212 .values() 2213 .stream() 2214 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2215 .toList(); 2216 for (var next : params) { 2217 2218 String paths = next.getPath(); 2219 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2220 2221 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2222 myContext, myResourceName, path, theReverse)) { 2223 continue; 2224 } 2225 2226 if (!next.getTargets().isEmpty()) { 2227 // For each reference parameter on the resource type we're searching for, 2228 // add all the potential target types to the list of possible target 2229 // resource types we can look up. 
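							// (Illustrative example, assuming a typical reference parameter: if the
							// parameter declares targets such as [Practitioner, Organization], only
							// those with a registered DAO end up in targetResourceTypes.)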
2230 for (var nextTarget : next.getTargets()) { 2231 if (possibleTypes.contains(nextTarget)) { 2232 targetResourceTypes.add(nextTarget); 2233 } 2234 } 2235 } else { 2236 // If we have any references that don't define any target types, then 2237 // we need to assume that all enabled resource types are possible target 2238 // types 2239 targetResourceTypes.addAll(possibleTypes); 2240 break; 2241 } 2242 } 2243 } 2244 } 2245 } 2246 2247 if (targetResourceTypes.isEmpty()) { 2248 return new CanonicalUrlTargets(Set.of(), Set.of()); 2249 } 2250 2251 Set<Long> hashIdentityValues = new HashSet<>(); 2252 Set<Integer> partitionIds = new HashSet<>(); 2253 for (String type : targetResourceTypes) { 2254 2255 RequestPartitionId readPartition; 2256 if (myPartitionSettings.isPartitioningEnabled()) { 2257 readPartition = 2258 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2259 } else { 2260 readPartition = RequestPartitionId.defaultPartition(); 2261 } 2262 if (readPartition.hasPartitionIds()) { 2263 partitionIds.addAll(readPartition.getPartitionIds()); 2264 } 2265 2266 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2267 myPartitionSettings, readPartition, type, "url"); 2268 hashIdentityValues.add(hashIdentity); 2269 } 2270 2271 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2272 } 2273 2274 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2275 public boolean isEmpty() { 2276 return hashIdentityValues.isEmpty(); 2277 } 2278 } 2279 2280 /** 2281 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2282 * those pids where: 2283 * <ul> 2284 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2285 * <li>Each list only contains JpaPids with the same partition ID</li> 2286 * </ul> 2287 */ 2288 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2289 2290 if (theNextRoundMatches.size() <= theMaxLoad) { 2291 boolean allSamePartition = true; 2292 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2293 if (!Objects.equals( 2294 theNextRoundMatches.get(i - 1).getPartitionId(), 2295 theNextRoundMatches.get(i).getPartitionId())) { 2296 allSamePartition = false; 2297 break; 2298 } 2299 } 2300 if (allSamePartition) { 2301 return Collections.singletonList(theNextRoundMatches); 2302 } 2303 } 2304 2305 // Break into partitioned sublists 2306 ListMultimap<String, JpaPid> lists = 2307 MultimapBuilder.hashKeys().arrayListValues().build(); 2308 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2309 String partitionId = nextRoundMatch.getPartitionId() != null 2310 ? 
					nextRoundMatch.getPartitionId().toString()
					: "";
			lists.put(partitionId, nextRoundMatch);
		}

		List<Collection<JpaPid>> retVal = new ArrayList<>();
		for (String key : lists.keySet()) {
			List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
			retVal.addAll(nextPartition);
		}

		// In unit test mode, we sort the results just for unit test predictability
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			retVal = retVal.stream()
					.map(t -> t.stream().sorted().collect(Collectors.toList()))
					.collect(Collectors.toList());
		}

		return retVal;
	}

	private void attemptComboUniqueSpProcessing(
			QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
		RuntimeSearchParam comboParam = null;
		List<String> comboParamNames = null;
		List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
				myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		if (!exactMatchParams.isEmpty()) {
			comboParam = exactMatchParams.get(0);
			comboParamNames = new ArrayList<>(theParams.keySet());
		}

		if (comboParam == null) {
			List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			for (RuntimeSearchParam nextCandidate : candidateComboParams) {
				List<String> nextCandidateParamNames =
						JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
								.map(RuntimeSearchParam::getName)
								.collect(Collectors.toList());
				if (theParams.keySet().containsAll(nextCandidateParamNames)) {
					comboParam = nextCandidate;
					comboParamNames = nextCandidateParamNames;
					break;
				}
			}
		}

		if (comboParam != null) {
			Collections.sort(comboParamNames);

			// Since we're going to remove elements below
			theParams.values().forEach(this::ensureSubListsAreWritable);

			/*
			 * Apply search against the combo param index in a loop:
			 *
			 * 1. First we check whether the parameter values in the
			 * parameter map are actually usable for searching against the combo
			 * param index. E.g. no search modifiers, date comparators, etc.,
			 * since these mean you can't use the combo index.
			 *
			 * 2. Apply and create the join SQL. We remove parameter values from
			 * the map as we apply them, so any parameter values remaining in the
			 * map after each loop haven't yet been factored into the SQL.
			 *
			 * The loop allows us to create multiple combo index joins if there
			 * are multiple AND expressions for the related parameters.
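			 *
			 * (Illustrative sketch, assuming a hypothetical combo parameter covering [birthdate, family]
			 * on Patient: a single loop iteration might produce an index string such as
			 * "Patient?birthdate=2000-01-01&family=simpson", which applyComboSearchParam below then
			 * matches against the combo index.)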
2378 */ 2379 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2380 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2381 } 2382 } 2383 } 2384 2385 private void applyComboSearchParam( 2386 QueryStack theQueryStack, 2387 @Nonnull SearchParameterMap theParams, 2388 RequestDetails theRequest, 2389 List<String> theComboParamNames, 2390 RuntimeSearchParam theComboParam) { 2391 2392 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2393 for (String nextParamName : theComboParamNames) { 2394 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2395 inputs.add(nextValues); 2396 } 2397 2398 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2399 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2400 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2401 2402 StringBuilder searchStringBuilder = new StringBuilder(); 2403 searchStringBuilder.append(myResourceName); 2404 searchStringBuilder.append("?"); 2405 2406 boolean first = true; 2407 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2408 2409 String nextParamName = theComboParamNames.get(paramIndex); 2410 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2411 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2412 // As a result, we strip the prefix if present. 2413 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2414 2415 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2416 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2417 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2418 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2419 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2420 } 2421 } 2422 2423 if (first) { 2424 first = false; 2425 } else { 2426 searchStringBuilder.append('&'); 2427 } 2428 2429 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2430 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2431 2432 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2433 } 2434 2435 String indexString = searchStringBuilder.toString(); 2436 ourLog.debug( 2437 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2438 2439 indexStrings.add(indexString); 2440 } 2441 2442 // Just to make sure we're stable for tests 2443 indexStrings.sort(Comparator.naturalOrder()); 2444 2445 // Interceptor broadcast: JPA_PERFTRACE_INFO 2446 IInterceptorBroadcaster compositeBroadcaster = 2447 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2448 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2449 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2450 StorageProcessingMessage msg = new StorageProcessingMessage() 2451 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2452 + indexStringForLog); 2453 HookParams params = new HookParams() 2454 .add(RequestDetails.class, theRequest) 2455 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2456 .add(StorageProcessingMessage.class, msg); 2457 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2458 } 2459 2460 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2461 case UNIQUE: 2462 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2463 break; 2464 case NON_UNIQUE: 2465 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2466 break; 2467 } 2468 2469 // Remove any empty parameters remaining after this 2470 theParams.clean(); 2471 } 2472 2473 /** 2474 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2475 * searching against a combo param with the given parameter names. This might be {@literal false} if 2476 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2477 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2478 */ 2479 private boolean validateParamValuesAreValidForComboParam( 2480 RequestDetails theRequest, 2481 @Nonnull SearchParameterMap theParams, 2482 List<String> theComboParamNames, 2483 RuntimeSearchParam theComboParam) { 2484 boolean paramValuesAreValidForCombo = true; 2485 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2486 2487 for (String nextParamName : theComboParamNames) { 2488 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2489 2490 if (nextValues == null || nextValues.isEmpty()) { 2491 paramValuesAreValidForCombo = false; 2492 break; 2493 } 2494 2495 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2496 paramOrValues.add(nextAndValue); 2497 2498 for (IQueryParameterType nextOrValue : nextAndValue) { 2499 if (nextOrValue instanceof DateParam dateParam) { 2500 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2501 String message = "Search with params " + theComboParamNames 2502 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2503 + nextParamName + "'"; 2504 firePerformanceInfo(theRequest, message); 2505 paramValuesAreValidForCombo = false; 2506 break; 2507 } 2508 } 2509 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2510 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2511 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2512 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
2513 if (prefix != null && prefix != EQUAL) { 2514 String message = "Search with params " + theComboParamNames 2515 + " is not a candidate for combo searching - Parameter '" + nextParamName 2516 + "' has prefix: '" 2517 + paramWithPrefix.getPrefix().getValue() + "'"; 2518 firePerformanceInfo(theRequest, message); 2519 paramValuesAreValidForCombo = false; 2520 break; 2521 } 2522 } 2523 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2524 String message = "Search with params " + theComboParamNames 2525 + " is not a candidate for combo searching - Parameter '" + nextParamName 2526 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2527 firePerformanceInfo(theRequest, message); 2528 paramValuesAreValidForCombo = false; 2529 break; 2530 } 2531 } 2532 2533 // Reference params are only eligible for using a composite index if they 2534 // are qualified 2535 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2536 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2537 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2538 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2539 if (isBlank(param.getResourceType())) { 2540 ourLog.debug( 2541 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2542 paramValuesAreValidForCombo = false; 2543 break; 2544 } 2545 } 2546 2547 // Date params are not eligible for using composite unique index 2548 // as index could contain date with different precision (e.g. DAY, SECOND) 2549 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2550 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2551 ourLog.debug( 2552 "Search with params {} is not a candidate for combo searching - " 2553 + "Unique combo search parameter '{}' has DATE type", 2554 theComboParamNames, 2555 nextParamName); 2556 paramValuesAreValidForCombo = false; 2557 break; 2558 } 2559 } 2560 2561 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2562 ourLog.debug( 2563 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2564 paramValuesAreValidForCombo = false; 2565 } 2566 2567 return paramValuesAreValidForCombo; 2568 } 2569 2570 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2571 for (int i = 0; i < theListOfLists.size(); i++) { 2572 List<T> oldSubList = theListOfLists.get(i); 2573 if (!(oldSubList instanceof ArrayList)) { 2574 List<T> newSubList = new ArrayList<>(oldSubList); 2575 theListOfLists.set(i, newSubList); 2576 } 2577 } 2578 } 2579 2580 @Override 2581 public void setFetchSize(int theFetchSize) { 2582 myFetchSize = theFetchSize; 2583 } 2584 2585 public SearchParameterMap getParams() { 2586 return myParams; 2587 } 2588 2589 public CriteriaBuilder getBuilder() { 2590 return myCriteriaBuilder; 2591 } 2592 2593 public Class<? 
extends IBaseResource> getResourceType() { 2594 return myResourceType; 2595 } 2596 2597 public String getResourceName() { 2598 return myResourceName; 2599 } 2600 2601 /** 2602 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2603 */ 2604 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2605 2606 private final RequestDetails myRequest; 2607 private final Set<JpaPid> myCurrentPids; 2608 private Iterator<JpaPid> myCurrentIterator; 2609 private JpaPid myNext; 2610 2611 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2612 myCurrentPids = new HashSet<>(thePidSet); 2613 myCurrentIterator = null; 2614 myRequest = theRequest; 2615 } 2616 2617 private void fetchNext() { 2618 while (myNext == null) { 2619 2620 if (myCurrentIterator == null) { 2621 Set<Include> includes = new HashSet<>(); 2622 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2623 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2624 for (IQueryParameterType type : typeList) { 2625 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2626 for (String resourceType : queryString.split(",")) { 2627 String rt = resourceType.trim(); 2628 if (isNotBlank(rt)) { 2629 includes.add(new Include(rt + ":*", true)); 2630 } 2631 } 2632 } 2633 } 2634 } 2635 if (includes.isEmpty()) { 2636 includes.add(new Include("*", true)); 2637 } 2638 Set<JpaPid> newPids = loadIncludes( 2639 myContext, 2640 myEntityManager, 2641 myCurrentPids, 2642 includes, 2643 false, 2644 getParams().getLastUpdated(), 2645 mySearchUuid, 2646 myRequest, 2647 null); 2648 myCurrentIterator = newPids.iterator(); 2649 } 2650 2651 if (myCurrentIterator.hasNext()) { 2652 myNext = myCurrentIterator.next(); 2653 } else { 2654 myNext = NO_MORE; 2655 } 2656 } 2657 } 2658 2659 @Override 2660 public boolean hasNext() { 2661 fetchNext(); 2662 return !NO_MORE.equals(myNext); 2663 } 2664 2665 @Override 2666 public JpaPid next() { 2667 fetchNext(); 2668 JpaPid retVal = myNext; 2669 myNext = null; 2670 return retVal; 2671 } 2672 } 2673 /** 2674 * Basic Query iterator, used to fetch the results of a query. 2675 */ 2676 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2677 2678 private final SearchRuntimeDetails mySearchRuntimeDetails; 2679 2680 private final RequestDetails myRequest; 2681 private final boolean myHaveRawSqlHooks; 2682 private final boolean myHavePerfTraceFoundIdHook; 2683 private final SortSpec mySort; 2684 private final Integer myOffset; 2685 private final IInterceptorBroadcaster myCompositeBroadcaster; 2686 private boolean myFirst = true; 2687 private IncludesIterator myIncludesIterator; 2688 /** 2689 * The next JpaPid value of the next result in this query. 2690 * Will not be null if fetched using getNext() 2691 */ 2692 private JpaPid myNext; 2693 /** 2694 * The current query result iterator running sql and supplying PIDs 2695 * @see #myQueryList 2696 */ 2697 private ISearchQueryExecutor myResultsIterator; 2698 2699 private boolean myFetchIncludesForEverythingOperation; 2700 2701 /** 2702 * The count of resources skipped because they were seen in earlier results 2703 */ 2704 private int mySkipCount = 0; 2705 /** 2706 * The count of resources that are new in this search 2707 * (ie, not cached in previous searches) 2708 */ 2709 private int myNonSkipCount = 0; 2710 /** 2711 * The list of queries to use to find all results. 
2712 * Normal JPA queries will normally have a single entry. 2713 * Queries that involve Hibernate Search/Elasticsearch may have 2714 * multiple queries because of chunking. 2715 * The $everything operation also jams some extra results in. 2716 */ 2717 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2718 2719 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2720 mySearchRuntimeDetails = theSearchRuntimeDetails; 2721 mySort = myParams.getSort(); 2722 myOffset = myParams.getOffset(); 2723 myRequest = theRequest; 2724 myCompositeBroadcaster = 2725 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2726 2727 // everything requires fetching recursively all related resources 2728 if (myParams.getEverythingMode() != null) { 2729 myFetchIncludesForEverythingOperation = true; 2730 } 2731 2732 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2733 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2734 } 2735 2736 private void fetchNext() { 2737 try { 2738 if (myHaveRawSqlHooks) { 2739 CurrentThreadCaptureQueriesListener.startCapturing(); 2740 } 2741 2742 // If we don't have a query yet, create one 2743 if (myResultsIterator == null) { 2744 if (!mySearchProperties.hasMaxResultsRequested()) { 2745 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2746 } 2747 2748 /* 2749 * assigns the results iterator 2750 * and populates the myQueryList. 2751 */ 2752 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2753 } 2754 2755 if (myNext == null) { 2756 // no next means we need a new query (if one is available) 2757 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2758 /* 2759 * Because we combine our DB searches with Lucene 2760 * sometimes we can have multiple results iterators 2761 * (with only some having data in them to extract). 2762 * 2763 * We'll iterate our results iterators until we 2764 * either run out of results iterators, or we 2765 * have one that actually has data in it. 2766 */ 2767 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2768 retrieveNextIteratorQuery(); 2769 } 2770 2771 if (!myResultsIterator.hasNext()) { 2772 // we couldn't find a results iterator; 2773 // we're done here 2774 break; 2775 } 2776 2777 JpaPid nextPid = myResultsIterator.next(); 2778 if (myHavePerfTraceFoundIdHook) { 2779 callPerformanceTracingHook(nextPid); 2780 } 2781 2782 if (nextPid != null) { 2783 if (!myPidSet.contains(nextPid)) { 2784 if (!mySearchProperties.isDeduplicateInDatabase()) { 2785 /* 2786 * We only add to the map if we aren't fetching "everything"; 2787 * otherwise, we let the de-duplication happen in the database 2788 * (see createChunkedQueryNormalSearch above), because it 2789 * saves memory that way. 
2790 */ 2791 myPidSet.add(nextPid); 2792 } 2793 if (doNotSkipNextPidForEverything()) { 2794 myNext = nextPid; 2795 myNonSkipCount++; 2796 break; 2797 } 2798 } else { 2799 mySkipCount++; 2800 } 2801 } 2802 2803 if (!myResultsIterator.hasNext()) { 2804 if (mySearchProperties.hasMaxResultsRequested() 2805 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2806 if (mySkipCount > 0 && myNonSkipCount == 0) { 2807 sendProcessingMsgAndFirePerformanceHook(); 2808 // need the next iterator; increase the maxsize 2809 // (we should always do this) 2810 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2811 mySearchProperties.setMaxResultsRequested(maxResults); 2812 2813 if (!mySearchProperties.isDeduplicateInDatabase()) { 2814 // if we're not using the database to deduplicate 2815 // we should recheck our memory usage 2816 // the prefetch size check is future proofing 2817 int prefetchSize = myStorageSettings 2818 .getSearchPreFetchThresholds() 2819 .size(); 2820 if (prefetchSize > 0) { 2821 if (myStorageSettings 2822 .getSearchPreFetchThresholds() 2823 .get(prefetchSize - 1) 2824 < mySearchProperties.getMaxResultsRequested()) { 2825 mySearchProperties.setDeduplicateInDatabase(true); 2826 } 2827 } 2828 } 2829 2830 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2831 } 2832 } 2833 } 2834 } 2835 } 2836 2837 if (myNext == null) { 2838 // if we got here, it means the current JpaPid has already been processed, 2839 // and we will decide (here) if we need to fetch related resources recursively 2840 if (myFetchIncludesForEverythingOperation) { 2841 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2842 myFetchIncludesForEverythingOperation = false; 2843 } 2844 if (myIncludesIterator != null) { 2845 while (myIncludesIterator.hasNext()) { 2846 JpaPid next = myIncludesIterator.next(); 2847 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2848 myNext = next; 2849 break; 2850 } 2851 } 2852 if (myNext == null) { 2853 myNext = NO_MORE; 2854 } 2855 } else { 2856 myNext = NO_MORE; 2857 } 2858 } 2859 2860 if (!mySearchProperties.hasMaxResultsRequested()) { 2861 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2862 } else { 2863 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2864 } 2865 2866 } finally { 2867 // search finished - fire hooks 2868 if (myHaveRawSqlHooks) { 2869 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2870 } 2871 } 2872 2873 if (myFirst) { 2874 HookParams params = new HookParams() 2875 .add(RequestDetails.class, myRequest) 2876 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2877 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2878 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2879 myFirst = false; 2880 } 2881 2882 if (NO_MORE.equals(myNext)) { 2883 HookParams params = new HookParams() 2884 .add(RequestDetails.class, myRequest) 2885 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2886 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2887 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2888 } 2889 } 2890 2891 private Integer calculateMaxResultsToFetch() { 2892 if (myParams.getLoadSynchronousUpTo() != null) { 2893 return myParams.getLoadSynchronousUpTo(); 2894 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2895 return myParams.getEverythingMode() != null 2896 ? 
myParams.getOffset() + myParams.getCount() 2897 : myParams.getCount(); 2898 } else { 2899 return myStorageSettings.getFetchSizeDefaultMaximum(); 2900 } 2901 } 2902 2903 private boolean doNotSkipNextPidForEverything() { 2904 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2905 } 2906 2907 private void callPerformanceTracingHook(JpaPid theNextPid) { 2908 HookParams params = new HookParams() 2909 .add(Integer.class, System.identityHashCode(this)) 2910 .add(Object.class, theNextPid); 2911 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2912 } 2913 2914 private void sendProcessingMsgAndFirePerformanceHook() { 2915 String msg = "Pass completed with no matching results seeking rows " 2916 + myPidSet.size() + "-" + mySkipCount 2917 + ". This indicates an inefficient query! Retrying with new max count of " 2918 + mySearchProperties.getMaxResultsRequested(); 2919 firePerformanceWarning(myRequest, msg); 2920 } 2921 2922 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2923 Integer offset = theOffset; 2924 if (myQueryList.isEmpty()) { 2925 // Capture times for Lucene/Elasticsearch queries as well 2926 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2927 2928 // setting offset to 0 to fetch all resource ids to guarantee 2929 // correct output result for everything operation during paging 2930 if (myParams.getEverythingMode() != null) { 2931 offset = 0; 2932 } 2933 2934 SearchQueryProperties properties = mySearchProperties.clone(); 2935 properties 2936 .setOffset(offset) 2937 .setMaxResultsRequested(theMaxResultsToFetch) 2938 .setDoCountOnlyFlag(false) 2939 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2940 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2941 } 2942 2943 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2944 2945 retrieveNextIteratorQuery(); 2946 2947 mySkipCount = 0; 2948 myNonSkipCount = 0; 2949 } 2950 2951 private void retrieveNextIteratorQuery() { 2952 close(); 2953 if (isNotEmpty(myQueryList)) { 2954 myResultsIterator = myQueryList.remove(0); 2955 myHasNextIteratorQuery = true; 2956 } else { 2957 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2958 myHasNextIteratorQuery = false; 2959 } 2960 } 2961 2962 @Override 2963 public boolean hasNext() { 2964 if (myNext == null) { 2965 fetchNext(); 2966 } 2967 return !NO_MORE.equals(myNext); 2968 } 2969 2970 @Override 2971 public JpaPid next() { 2972 fetchNext(); 2973 JpaPid retVal = myNext; 2974 myNext = null; 2975 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2976 return retVal; 2977 } 2978 2979 @Override 2980 public int getSkippedCount() { 2981 return mySkipCount; 2982 } 2983 2984 @Override 2985 public int getNonSkippedCount() { 2986 return myNonSkipCount; 2987 } 2988 2989 @Override 2990 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2991 Collection<JpaPid> batch = new ArrayList<>(); 2992 while (this.hasNext() && batch.size() < theBatchSize) { 2993 batch.add(this.next()); 2994 } 2995 return batch; 2996 } 2997 2998 @Override 2999 public void close() { 3000 if (myResultsIterator != null) { 3001 myResultsIterator.close(); 3002 } 3003 myResultsIterator = null; 3004 } 3005 } 3006 3007 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 3008 // Only log at debug level since these messages aren't considered important enough 3009 // that we should be 
cluttering the system log, but they are important to the
		// specific query being executed, so we'll surface them through the JPA_PERFTRACE_INFO pointcut
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}

	private static ScrollableResults<?> toScrollableResults(Query theQuery) {
		org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
		return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
	}
}
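
// Illustrative usage sketch, assuming a wired-up SearchBuilder instance ("searchBuilder"), an active
// FhirContext/EntityManager, and a set of matched pids ("matchedPids") from the main search; it shows
// one way a caller might drive loadIncludes(SearchBuilderLoadIncludesParameters) as implemented above:
//
//   SearchBuilderLoadIncludesParameters<JpaPid> params = new SearchBuilderLoadIncludesParameters<>();
//   params.setFhirContext(myContext);
//   params.setEntityManager(myEntityManager);
//   params.setMatches(matchedPids);                              // pids returned by the main search
//   params.setIncludeFilters(List.of(new Include("Observation:subject")));
//   params.setReverseMode(false);                                // _include rather than _revinclude
//   params.setLastUpdated(null);
//   params.setSearchIdOrDescription("example-search");
//   params.setRequestDetails(theRequestDetails);
//   params.setMaxCount(1000);
//   Set<JpaPid> includedPids = searchBuilder.loadIncludes(params);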