
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.ComboSearchParamType; 023import ca.uhn.fhir.context.FhirContext; 024import ca.uhn.fhir.context.FhirVersionEnum; 025import ca.uhn.fhir.context.RuntimeResourceDefinition; 026import ca.uhn.fhir.context.RuntimeSearchParam; 027import ca.uhn.fhir.i18n.Msg; 028import ca.uhn.fhir.interceptor.api.HookParams; 029import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 030import ca.uhn.fhir.interceptor.api.Pointcut; 031import ca.uhn.fhir.interceptor.model.RequestPartitionId; 032import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 033import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 034import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 035import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 036import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 037import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 038import ca.uhn.fhir.jpa.dao.BaseStorageDao; 039import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 040import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 041import ca.uhn.fhir.jpa.dao.IResultIterator; 042import ca.uhn.fhir.jpa.dao.ISearchBuilder; 043import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 044import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 045import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 046import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 047import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 048import ca.uhn.fhir.jpa.model.config.PartitionSettings; 049import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 050import ca.uhn.fhir.jpa.model.dao.JpaPid; 051import ca.uhn.fhir.jpa.model.dao.JpaPidFk; 052import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 053import ca.uhn.fhir.jpa.model.entity.BaseTag; 054import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 055import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 056import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 057import ca.uhn.fhir.jpa.model.entity.ResourceLink; 058import ca.uhn.fhir.jpa.model.entity.ResourceTag; 059import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 060import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 061import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 062import ca.uhn.fhir.jpa.model.util.JpaConstants; 063import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 064import ca.uhn.fhir.jpa.search.SearchConstants; 065import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 066import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 067import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 069import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 070import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 
071import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 072import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 073import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 074import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 075import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 076import ca.uhn.fhir.jpa.util.BaseIterator; 077import ca.uhn.fhir.jpa.util.CartesianProductUtil; 078import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 079import ca.uhn.fhir.jpa.util.QueryChunker; 080import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; 081import ca.uhn.fhir.jpa.util.SqlQueryList; 082import ca.uhn.fhir.model.api.IQueryParameterType; 083import ca.uhn.fhir.model.api.Include; 084import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 085import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 086import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 087import ca.uhn.fhir.rest.api.Constants; 088import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 089import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 090import ca.uhn.fhir.rest.api.SortOrderEnum; 091import ca.uhn.fhir.rest.api.SortSpec; 092import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 093import ca.uhn.fhir.rest.api.server.RequestDetails; 094import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 095import ca.uhn.fhir.rest.param.DateParam; 096import ca.uhn.fhir.rest.param.DateRangeParam; 097import ca.uhn.fhir.rest.param.ParamPrefixEnum; 098import ca.uhn.fhir.rest.param.ParameterUtil; 099import ca.uhn.fhir.rest.param.ReferenceParam; 100import ca.uhn.fhir.rest.param.StringParam; 101import ca.uhn.fhir.rest.param.TokenParam; 102import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 103import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 104import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 105import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 106import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 107import ca.uhn.fhir.svcs.ISearchLimiterSvc; 108import ca.uhn.fhir.system.HapiSystemProperties; 109import ca.uhn.fhir.util.SearchParameterUtil; 110import ca.uhn.fhir.util.StopWatch; 111import ca.uhn.fhir.util.StringUtil; 112import ca.uhn.fhir.util.UrlUtil; 113import com.google.common.annotations.VisibleForTesting; 114import com.google.common.collect.ListMultimap; 115import com.google.common.collect.Lists; 116import com.google.common.collect.MultimapBuilder; 117import com.healthmarketscience.sqlbuilder.Condition; 118import jakarta.annotation.Nonnull; 119import jakarta.annotation.Nullable; 120import jakarta.persistence.EntityManager; 121import jakarta.persistence.PersistenceContext; 122import jakarta.persistence.PersistenceContextType; 123import jakarta.persistence.Query; 124import jakarta.persistence.Tuple; 125import jakarta.persistence.TypedQuery; 126import jakarta.persistence.criteria.CriteriaBuilder; 127import jakarta.persistence.criteria.CriteriaQuery; 128import jakarta.persistence.criteria.Predicate; 129import jakarta.persistence.criteria.Root; 130import jakarta.persistence.criteria.Selection; 131import org.apache.commons.collections4.ListUtils; 132import org.apache.commons.lang3.StringUtils; 133import org.apache.commons.lang3.Validate; 134import org.apache.commons.lang3.math.NumberUtils; 135import org.apache.commons.lang3.tuple.Pair; 136import org.hibernate.ScrollMode; 137import org.hibernate.ScrollableResults; 138import org.hl7.fhir.instance.model.api.IAnyResource; 139import org.hl7.fhir.instance.model.api.IBaseResource; 140import 
org.hl7.fhir.instance.model.api.IIdType; 141import org.slf4j.Logger; 142import org.slf4j.LoggerFactory; 143import org.springframework.beans.factory.annotation.Autowired; 144import org.springframework.jdbc.core.JdbcTemplate; 145import org.springframework.transaction.support.TransactionSynchronizationManager; 146 147import java.util.ArrayList; 148import java.util.Arrays; 149import java.util.Collection; 150import java.util.Collections; 151import java.util.Comparator; 152import java.util.HashMap; 153import java.util.HashSet; 154import java.util.Iterator; 155import java.util.LinkedList; 156import java.util.List; 157import java.util.Map; 158import java.util.Objects; 159import java.util.Set; 160import java.util.stream.Collectors; 161 162import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 163import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 164import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 165import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 166import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 167import static java.util.Objects.requireNonNull; 168import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 169import static org.apache.commons.lang3.StringUtils.isBlank; 170import static org.apache.commons.lang3.StringUtils.isNotBlank; 171import static org.apache.commons.lang3.StringUtils.stripStart; 172 173/** 174 * The SearchBuilder is responsible for actually forming the SQL query that handles 175 * searches for resources 176 */ 177public class SearchBuilder implements ISearchBuilder<JpaPid> { 178 179 /** 180 * See loadResourcesByPid 181 * for an explanation of why we use the constant 800 182 */ 183 // NB: keep public 184 @Deprecated 185 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 186 187 public static final String RESOURCE_ID_ALIAS = "resource_id"; 188 public static final String PARTITION_ID_ALIAS = "partition_id"; 189 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 190 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 191 private static final JpaPid NO_MORE = JpaPid.fromId(-1L); 192 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 193 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 194 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 195 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 196 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 197 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 198 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 199 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 200 public static boolean myUseMaxPageSize50ForTest = false; 201 public static Integer myMaxPageSizeForTests = null; 202 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 203 protected final IResourceTagDao myResourceTagDao; 204 private String myResourceName; 205 private final Class<? 
extends IBaseResource> myResourceType; 206 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 207 private final SqlObjectFactory mySqlBuilderFactory; 208 private final HibernatePropertiesProvider myDialectProvider; 209 private final ISearchParamRegistry mySearchParamRegistry; 210 private final PartitionSettings myPartitionSettings; 211 private final DaoRegistry myDaoRegistry; 212 private final FhirContext myContext; 213 private final IIdHelperService<JpaPid> myIdHelperService; 214 private final JpaStorageSettings myStorageSettings; 215 private final SearchQueryProperties mySearchProperties; 216 private final IResourceHistoryTableDao myResourceHistoryTableDao; 217 private final IJpaStorageResourceParser myJpaStorageResourceParser; 218 219 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 220 protected EntityManager myEntityManager; 221 222 private CriteriaBuilder myCriteriaBuilder; 223 private SearchParameterMap myParams; 224 private String mySearchUuid; 225 private int myFetchSize; 226 227 private boolean myRequiresTotal; 228 229 /** 230 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 231 */ 232 private Set<JpaPid> myPidSet; 233 234 private boolean myHasNextIteratorQuery = false; 235 private RequestPartitionId myRequestPartitionId; 236 237 private IFulltextSearchSvc myFulltextSearchSvc; 238 239 private final ISearchLimiterSvc mySearchLimiterSvc; 240 241 @Autowired(required = false) 242 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 243 myFulltextSearchSvc = theFulltextSearchSvc; 244 } 245 246 @Autowired(required = false) 247 private IElasticsearchSvc myIElasticsearchSvc; 248 249 @Autowired 250 private IResourceHistoryTagDao myResourceHistoryTagDao; 251 252 @Autowired 253 private IRequestPartitionHelperSvc myPartitionHelperSvc; 254 255 /** 256 * Constructor 257 */ 258 @SuppressWarnings({"rawtypes", "unchecked"}) 259 public SearchBuilder( 260 String theResourceName, 261 JpaStorageSettings theStorageSettings, 262 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 263 SqlObjectFactory theSqlBuilderFactory, 264 HibernatePropertiesProvider theDialectProvider, 265 ISearchParamRegistry theSearchParamRegistry, 266 PartitionSettings thePartitionSettings, 267 IInterceptorBroadcaster theInterceptorBroadcaster, 268 IResourceTagDao theResourceTagDao, 269 DaoRegistry theDaoRegistry, 270 FhirContext theContext, 271 IIdHelperService theIdHelperService, 272 IResourceHistoryTableDao theResourceHistoryTagDao, 273 IJpaStorageResourceParser theIJpaStorageResourceParser, 274 ISearchLimiterSvc theSearchLimiterSvc, 275 Class<? 
extends IBaseResource> theResourceType) { 276 myResourceName = theResourceName; 277 myResourceType = theResourceType; 278 myStorageSettings = theStorageSettings; 279 mySearchLimiterSvc = theSearchLimiterSvc; 280 281 myEntityManagerFactory = theEntityManagerFactory; 282 mySqlBuilderFactory = theSqlBuilderFactory; 283 myDialectProvider = theDialectProvider; 284 mySearchParamRegistry = theSearchParamRegistry; 285 myPartitionSettings = thePartitionSettings; 286 myInterceptorBroadcaster = theInterceptorBroadcaster; 287 myResourceTagDao = theResourceTagDao; 288 myDaoRegistry = theDaoRegistry; 289 myContext = theContext; 290 myIdHelperService = theIdHelperService; 291 myResourceHistoryTableDao = theResourceHistoryTagDao; 292 myJpaStorageResourceParser = theIJpaStorageResourceParser; 293 294 mySearchProperties = new SearchQueryProperties(); 295 } 296 297 @VisibleForTesting 298 void setResourceName(String theName) { 299 myResourceName = theName; 300 } 301 302 @Override 303 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 304 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 305 } 306 307 @Override 308 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 309 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 310 } 311 312 @Override 313 public void setRequireTotal(boolean theRequireTotal) { 314 myRequiresTotal = theRequireTotal; 315 } 316 317 @Override 318 public boolean requiresTotal() { 319 return myRequiresTotal; 320 } 321 322 private void searchForIdsWithAndOr( 323 SearchQueryBuilder theSearchSqlBuilder, 324 QueryStack theQueryStack, 325 @Nonnull SearchParameterMap theParams, 326 RequestDetails theRequest) { 327 myParams = theParams; 328 mySearchProperties.setSortSpec(myParams.getSort()); 329 330 // Remove any empty parameters 331 theParams.clean(); 332 333 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 334 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 335 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 336 } 337 338 // Attempt to lookup via composite unique key. 
339 if (isCompositeUniqueSpCandidate()) { 340 attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest); 341 } 342 343 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 344 List<String> paramNames = myParams.keySet().stream() 345 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 346 .filter(t -> !t.equals(Constants.PARAM_TAG)) 347 .collect(Collectors.toList()); 348 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 349 paramNames.add(IAnyResource.SP_RES_ID); 350 } 351 if (myParams.containsKey(Constants.PARAM_TAG)) { 352 paramNames.add(Constants.PARAM_TAG); 353 } 354 355 // Handle each parameter 356 for (String nextParamName : paramNames) { 357 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 358 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 359 // Elasticsearch 360 continue; 361 } 362 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 363 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 364 .setParamName(nextParamName) 365 .setAndOrParams(andOrParams) 366 .setRequest(theRequest) 367 .setRequestPartitionId(myRequestPartitionId)); 368 if (predicate != null) { 369 theSearchSqlBuilder.addPredicate(predicate); 370 } 371 } 372 } 373 374 /** 375 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the 376 * parameters all have no modifiers. 377 */ 378 private boolean isCompositeUniqueSpCandidate() { 379 return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null; 380 } 381 382 @SuppressWarnings("ConstantConditions") 383 @Override 384 public Long createCountQuery( 385 SearchParameterMap theParams, 386 String theSearchUuid, 387 RequestDetails theRequest, 388 @Nonnull RequestPartitionId theRequestPartitionId) { 389 390 assert theRequestPartitionId != null; 391 assert TransactionSynchronizationManager.isActualTransactionActive(); 392 393 init(theParams, theSearchUuid, theRequestPartitionId); 394 395 if (checkUseHibernateSearch()) { 396 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 397 } 398 399 SearchQueryProperties properties = mySearchProperties.clone(); 400 properties.setDoCountOnlyFlag(true); 401 properties.setSortSpec(null); // counts don't require sorts 402 properties.setMaxResultsRequested(null); 403 properties.setOffset(null); 404 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 405 if (queries.isEmpty()) { 406 return 0L; 407 } else { 408 JpaPid jpaPid = queries.get(0).next(); 409 return jpaPid.getId(); 410 } 411 } 412 413 /** 414 * @param thePidSet May be null 415 */ 416 @Override 417 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 418 myPidSet = new HashSet<>(thePidSet); 419 } 420 421 @SuppressWarnings("ConstantConditions") 422 @Override 423 public IResultIterator<JpaPid> createQuery( 424 SearchParameterMap theParams, 425 SearchRuntimeDetails theSearchRuntimeDetails, 426 RequestDetails theRequest, 427 @Nonnull RequestPartitionId theRequestPartitionId) { 428 assert theRequestPartitionId != null; 429 assert TransactionSynchronizationManager.isActualTransactionActive(); 430 431 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 432 433 if (myPidSet == null) { 434 myPidSet = new HashSet<>(); 435 } 436 437 return new QueryIterator(theSearchRuntimeDetails, 
theRequest); 438 } 439 440 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 441 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 442 // we mutate the params. Make a private copy. 443 myParams = theParams.clone(); 444 mySearchProperties.setSortSpec(myParams.getSort()); 445 mySearchUuid = theSearchUuid; 446 myRequestPartitionId = theRequestPartitionId; 447 } 448 449 /** 450 * The query created can be either a count query or the 451 * actual query. 452 * This is why it takes a SearchQueryProperties object 453 * (and doesn't use the local version of it). 454 * The properties may differ slightly for whichever 455 * query this is. 456 */ 457 private List<ISearchQueryExecutor> createQuery( 458 SearchParameterMap theParams, 459 SearchQueryProperties theSearchProperties, 460 RequestDetails theRequest, 461 SearchRuntimeDetails theSearchRuntimeDetails) { 462 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 463 464 if (checkUseHibernateSearch()) { 465 // we're going to run at least part of the search against the Fulltext service. 466 467 // Ugh - we have two different return types for now 468 ISearchQueryExecutor fulltextExecutor = null; 469 List<JpaPid> fulltextMatchIds = null; 470 int resultCount = 0; 471 if (myParams.isLastN()) { 472 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 473 resultCount = fulltextMatchIds.size(); 474 } else if (myParams.getEverythingMode() != null) { 475 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 476 resultCount = fulltextMatchIds.size(); 477 } else { 478 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 479 // enabled SP indexing). 480 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 481 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 482 // extra work in Elastic. 483 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 484 485 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 486 // a page. 487 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 488 } 489 490 if (fulltextExecutor == null) { 491 fulltextExecutor = 492 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 493 } 494 495 if (theSearchRuntimeDetails != null) { 496 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 497 IInterceptorBroadcaster compositeBroadcaster = 498 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 499 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 500 HookParams params = new HookParams() 501 .add(RequestDetails.class, theRequest) 502 .addIfMatchesType(ServletRequestDetails.class, theRequest) 503 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 504 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 505 } 506 } 507 508 // can we skip the database entirely and return the pid list from here? 509 boolean canSkipDatabase = 510 // if we processed an AND clause, and it returned nothing, then nothing can match. 
511 !fulltextExecutor.hasNext() 512 || 513 // Our hibernate search query doesn't respect partitions yet 514 (!myPartitionSettings.isPartitioningEnabled() 515 && 516 // were there AND terms left? Then we still need the db. 517 theParams.isEmpty() 518 && 519 // not every param is a param. :-( 520 theParams.getNearDistanceParam() == null 521 && 522 // todo MB don't we support _lastUpdated and _offset now? 523 theParams.getLastUpdated() == null 524 && theParams.getEverythingMode() == null 525 && theParams.getOffset() == null); 526 527 if (canSkipDatabase) { 528 ourLog.trace("Query finished after HSearch. Skip db query phase"); 529 if (theSearchProperties.hasMaxResultsRequested()) { 530 fulltextExecutor = SearchQueryExecutors.limited( 531 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 532 } 533 queries.add(fulltextExecutor); 534 } else { 535 ourLog.trace("Query needs db after HSearch. Chunking."); 536 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 537 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 538 QueryChunker.chunk( 539 fulltextExecutor, 540 SearchBuilder.getMaximumPageSize(), 541 // for each list of (SearchBuilder.getMaximumPageSize()) 542 // we create a chunked query and add it to 'queries' 543 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 544 } 545 } else { 546 // do everything in the database. 547 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 548 } 549 550 return queries; 551 } 552 553 /** 554 * Check to see if query should use Hibernate Search, and error if the query can't continue. 555 * 556 * @return true if the query should first be processed by Hibernate Search 557 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 558 */ 559 private boolean checkUseHibernateSearch() { 560 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 561 562 if (!fulltextEnabled) { 563 failIfUsed(Constants.PARAM_TEXT); 564 failIfUsed(Constants.PARAM_CONTENT); 565 } else { 566 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 567 final String paramName = sortSpec.getParamName(); 568 if (paramName.contains(".")) { 569 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 570 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 571 } 572 } 573 } 574 575 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 576 // can. 
577 return fulltextEnabled 578 && myParams != null 579 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 580 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 581 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 582 } 583 584 private void failIfUsed(String theParamName) { 585 if (myParams.containsKey(theParamName)) { 586 throw new InvalidRequestException(Msg.code(1192) 587 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 588 } 589 } 590 591 private void failIfUsedWithChainedSort(String theParamName) { 592 if (myParams.containsKey(theParamName)) { 593 throw new InvalidRequestException(Msg.code(2524) 594 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 595 + theParamName); 596 } 597 } 598 599 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 600 // Can we use our hibernate search generated index on resource to support lastN?: 601 if (myStorageSettings.isHibernateSearchIndexSearchParams()) { 602 if (myFulltextSearchSvc == null) { 603 throw new InvalidRequestException(Msg.code(2027) 604 + "LastN operation is not enabled on this service, can not process this request"); 605 } 606 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 607 .map(t -> (JpaPid) t) 608 .collect(Collectors.toList()); 609 } else { 610 throw new InvalidRequestException( 611 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 612 } 613 } 614 615 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 616 JpaPid pid = null; 617 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 618 String idParamValue; 619 IQueryParameterType idParam = 620 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 621 if (idParam instanceof TokenParam idParm) { 622 idParamValue = idParm.getValue(); 623 } else { 624 StringParam idParm = (StringParam) idParam; 625 idParamValue = idParm.getValue(); 626 } 627 628 pid = myIdHelperService 629 .resolveResourceIdentity( 630 myRequestPartitionId, 631 myResourceName, 632 idParamValue, 633 ResolveIdentityMode.includeDeleted().cacheOk()) 634 .getPersistentId(); 635 } 636 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 637 } 638 639 private void doCreateChunkedQueries( 640 SearchParameterMap theParams, 641 List<JpaPid> thePids, 642 SearchQueryProperties theSearchQueryProperties, 643 RequestDetails theRequest, 644 ArrayList<ISearchQueryExecutor> theQueries) { 645 646 if (thePids.size() < getMaximumPageSize()) { 647 thePids = normalizeIdListForInClause(thePids); 648 } 649 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 650 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 651 } 652 653 /** 654 * Combs through the params for any _id parameters and extracts the PIDs for them 655 */ 656 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 657 // get all the IQueryParameterType objects 658 // for _id -> these should all be StringParam values 659 HashSet<IIdType> ids = new HashSet<>(); 660 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 661 for (List<IQueryParameterType> paramList : params) { 662 for (IQueryParameterType param : paramList) { 663 String id; 664 if (param instanceof StringParam) { 665 // we expect all _id values to be StringParams 666 id = 
((StringParam) param).getValue(); 667 } else if (param instanceof TokenParam) { 668 id = ((TokenParam) param).getValue(); 669 } else { 670 // we do not expect the _id parameter to be a non-string value 671 throw new IllegalArgumentException( 672 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 673 } 674 675 IIdType idType = myContext.getVersion().newIdType(); 676 if (id.contains("/")) { 677 idType.setValue(id); 678 } else { 679 idType.setValue(myResourceName + "/" + id); 680 } 681 ids.add(idType); 682 } 683 } 684 685 // fetch our target Pids 686 // this will throw if an id is not found 687 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 688 myRequestPartitionId, 689 new ArrayList<>(ids), 690 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 691 692 // add the pids to targetPids 693 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 694 theTargetPids.add(pid.getPersistentId()); 695 } 696 } 697 698 private void createChunkedQuery( 699 SearchParameterMap theParams, 700 SearchQueryProperties theSearchProperties, 701 RequestDetails theRequest, 702 List<JpaPid> thePidList, 703 List<ISearchQueryExecutor> theSearchQueryExecutors) { 704 if (myParams.getEverythingMode() != null) { 705 createChunkedQueryForEverythingSearch( 706 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 707 } else { 708 createChunkedQueryNormalSearch( 709 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 710 } 711 } 712 713 private void createChunkedQueryNormalSearch( 714 SearchParameterMap theParams, 715 SearchQueryProperties theSearchProperties, 716 RequestDetails theRequest, 717 List<JpaPid> thePidList, 718 List<ISearchQueryExecutor> theSearchQueryExecutors) { 719 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 720 myContext, 721 myStorageSettings, 722 myPartitionSettings, 723 myRequestPartitionId, 724 myResourceName, 725 mySqlBuilderFactory, 726 myDialectProvider, 727 theSearchProperties.isDoCountOnlyFlag()); 728 QueryStack queryStack3 = new QueryStack( 729 theRequest, 730 theParams, 731 myStorageSettings, 732 myContext, 733 sqlBuilder, 734 mySearchParamRegistry, 735 myPartitionSettings); 736 737 if (theParams.keySet().size() > 1 738 || theParams.getSort() != null 739 || theParams.keySet().contains(Constants.PARAM_HAS) 740 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 741 List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 742 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 743 if (activeComboParams.isEmpty()) { 744 sqlBuilder.setNeedResourceTableRoot(true); 745 } 746 } 747 748 /* 749 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 750 * specific filters with ORs as their root from working around the natural resource type / deletion 751 * status / partition IDs built into queries. 752 */ 753 if (theParams.containsKey(Constants.PARAM_FILTER)) { 754 Condition partitionIdPredicate = sqlBuilder 755 .getOrCreateResourceTablePredicateBuilder() 756 .createPartitionIdPredicate(myRequestPartitionId); 757 if (partitionIdPredicate != null) { 758 sqlBuilder.addPredicate(partitionIdPredicate); 759 } 760 } 761 762 // Normal search 763 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 764 765 // If we haven't added any predicates yet, we're doing a search for all resources. 
Make sure we add the 766 // partition ID predicate in that case. 767 if (!sqlBuilder.haveAtLeastOnePredicate()) { 768 Condition partitionIdPredicate = sqlBuilder 769 .getOrCreateResourceTablePredicateBuilder() 770 .createPartitionIdPredicate(myRequestPartitionId); 771 if (partitionIdPredicate != null) { 772 sqlBuilder.addPredicate(partitionIdPredicate); 773 } 774 } 775 776 // Add PID list predicate for full text search and/or lastn operation 777 addPidListPredicate(thePidList, sqlBuilder); 778 779 // Last updated 780 addLastUpdatePredicate(sqlBuilder); 781 782 /* 783 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 784 * to something needed to guarantee correct results. 785 * 786 * Why do we need it? Suppose for example, a query like: 787 * Observation?category=foo,bar,baz 788 * And suppose you have many resources that have all 3 of these category codes. In this case 789 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 790 * we may exhaust the query results without getting enough distinct results back. When that 791 * happens we re-run the query with a larger limit. Excluding results we already know about 792 * tries to ensure that we get new unique results. 793 * 794 * The challenge with that though is that lots of DBs have an issue with too many 795 * parameters in one query. So we only do this optimization if there aren't too 796 * many results. 797 */ 798 if (myHasNextIteratorQuery) { 799 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 800 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 801 } 802 } 803 804 /* 805 * If offset is present, we want to deduplicate the results by using GROUP BY; 806 * OR 807 * if the MaxResultsToFetch is null, we are requesting "everything", 808 * so we'll let the db do the deduplication (instead of in-memory) 809 */ 810 if (theSearchProperties.isDeduplicateInDatabase()) { 811 queryStack3.addGrouping(); 812 queryStack3.setUseAggregate(true); 813 } 814 815 /* 816 * Sort 817 * 818 * If we have a sort, we wrap the criteria search (the search that actually 819 * finds the appropriate resources) in an outer search which is then sorted 820 */ 821 if (theSearchProperties.hasSort()) { 822 assert !theSearchProperties.isDoCountOnlyFlag(); 823 824 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 825 } 826 827 /* 828 * Now perform the search 829 */ 830 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 831 } 832 833 private void executeSearch( 834 SearchQueryProperties theProperties, 835 List<ISearchQueryExecutor> theSearchQueryExecutors, 836 SearchQueryBuilder sqlBuilder) { 837 GeneratedSql generatedSql = 838 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 839 if (!generatedSql.isMatchNothing()) { 840 SearchQueryExecutor executor = 841 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 842 theSearchQueryExecutors.add(executor); 843 } 844 } 845 846 private void createChunkedQueryForEverythingSearch( 847 RequestDetails theRequest, 848 SearchParameterMap theParams, 849 SearchQueryProperties theSearchQueryProperties, 850 List<JpaPid> thePidList, 851 List<ISearchQueryExecutor> theSearchQueryExecutors) { 852 853 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 854 myContext, 855 myStorageSettings, 856 myPartitionSettings, 857 myRequestPartitionId, 858 null, 859 mySqlBuilderFactory, 860 myDialectProvider, 861 
theSearchQueryProperties.isDoCountOnlyFlag()); 862 863 QueryStack queryStack3 = new QueryStack( 864 theRequest, 865 theParams, 866 myStorageSettings, 867 myContext, 868 sqlBuilder, 869 mySearchParamRegistry, 870 myPartitionSettings); 871 872 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 873 874 Set<JpaPid> targetPids = new HashSet<>(); 875 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 876 877 extractTargetPidsFromIdParams(targetPids); 878 879 // add the target pids to our executors as the first 880 // results iterator to go through 881 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 882 } else { 883 // For Everything queries, we make the query root by the ResourceLink table, since this query 884 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 885 // the one problem with this approach is that it doesn't catch Patients that have absolutely 886 // nothing linked to them. So we do one additional query to make sure we catch those too. 887 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 888 myContext, 889 myStorageSettings, 890 myPartitionSettings, 891 myRequestPartitionId, 892 myResourceName, 893 mySqlBuilderFactory, 894 myDialectProvider, 895 theSearchQueryProperties.isDoCountOnlyFlag()); 896 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 897 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 898 String sql = allTargetsSql.getSql(); 899 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 900 901 List<JpaPid> output = 902 jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args); 903 904 // we add a search executor to fetch unlinked patients first 905 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 906 } 907 908 List<String> typeSourceResources = new ArrayList<>(); 909 if (myParams.get(Constants.PARAM_TYPE) != null) { 910 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 911 } 912 913 queryStack3.addPredicateEverythingOperation( 914 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 915 916 // Add PID list predicate for full text search and/or lastn operation 917 addPidListPredicate(thePidList, sqlBuilder); 918 919 /* 920 * If offset is present, we want deduplicate the results by using GROUP BY 921 * ORDER BY is required to make sure we return unique results for each page 922 */ 923 if (theSearchQueryProperties.hasOffset()) { 924 queryStack3.addGrouping(); 925 queryStack3.addOrdering(); 926 queryStack3.setUseAggregate(true); 927 } 928 929 if (myParams.getEverythingMode().isPatient()) { 930 Collection<String> resourcesToOmit = 931 mySearchLimiterSvc.getResourcesToOmitForOperationSearches(JpaConstants.OPERATION_EVERYTHING); 932 sqlBuilder.excludeResourceTypesPredicate(resourcesToOmit); 933 } 934 935 /* 936 * Now perform the search 937 */ 938 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 939 } 940 941 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 942 if (thePidList != null && !thePidList.isEmpty()) { 943 theSqlBuilder.addResourceIdsPredicate(thePidList); 944 } 945 } 946 947 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 948 DateRangeParam lu = myParams.getLastUpdated(); 949 if (lu != null && !lu.isEmpty()) { 950 Condition lastUpdatedPredicates = 
theSqlBuilder.addPredicateLastUpdated(lu); 951 theSqlBuilder.addPredicate(lastUpdatedPredicates); 952 } 953 } 954 955 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 956 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 957 jdbcTemplate.setFetchSize(myFetchSize); 958 if (theMaximumResults != null) { 959 jdbcTemplate.setMaxRows(theMaximumResults); 960 } 961 return jdbcTemplate; 962 } 963 964 private Collection<String> extractTypeSourceResourcesFromParams() { 965 966 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 967 968 // first off, let's flatten the list of list 969 List<IQueryParameterType> iQueryParameterTypesList = 970 listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); 971 972 // then, extract all elements of each CSV into one big list 973 List<String> resourceTypes = iQueryParameterTypesList.stream() 974 .map(param -> ((StringParam) param).getValue()) 975 .map(csvString -> List.of(csvString.split(","))) 976 .flatMap(List::stream) 977 .collect(Collectors.toList()); 978 979 Set<String> knownResourceTypes = myContext.getResourceTypes(); 980 981 // remove leading/trailing whitespaces if any and remove duplicates 982 Set<String> retVal = new HashSet<>(); 983 984 for (String type : resourceTypes) { 985 String trimmed = type.trim(); 986 if (!knownResourceTypes.contains(trimmed)) { 987 throw new ResourceNotFoundException( 988 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 989 } 990 retVal.add(trimmed); 991 } 992 993 return retVal; 994 } 995 996 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 997 return myStorageSettings.isIndexOnContainedResources() 998 && theParams.values().stream() 999 .flatMap(Collection::stream) 1000 .flatMap(Collection::stream) 1001 .anyMatch(ReferenceParam.class::isInstance); 1002 } 1003 1004 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 1005 if (theSort == null || isBlank(theSort.getParamName())) { 1006 return; 1007 } 1008 1009 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 1010 1011 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 1012 1013 theQueryStack.addSortOnResourceId(ascending); 1014 1015 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 1016 1017 theQueryStack.addSortOnResourcePID(ascending); 1018 1019 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1020 1021 theQueryStack.addSortOnLastUpdated(ascending); 1022 1023 } else { 1024 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1025 myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1026 1027 /* 1028 * If we have a sort like _sort=subject.name and we have an 1029 * uplifted refchain for that combination we can do it more efficiently 1030 * by using the index associated with the uplifted refchain. In this case, 1031 * we need to find the actual target search parameter (corresponding 1032 * to "name" in this example) so that we know what datatype it is. 
1033 */ 1034 String paramName = theSort.getParamName(); 1035 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1036 String[] chains = StringUtils.split(paramName, '.'); 1037 if (chains.length == 2) { 1038 1039 // Given: Encounter?_sort=Patient:subject.name 1040 String referenceParam = chains[0]; // subject 1041 String referenceParamTargetType = null; // Patient 1042 String targetParam = chains[1]; // name 1043 1044 int colonIdx = referenceParam.indexOf(':'); 1045 if (colonIdx > -1) { 1046 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1047 referenceParam = referenceParam.substring(colonIdx + 1); 1048 } 1049 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1050 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1051 if (outerParam == null) { 1052 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1053 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1054 for (String nextTargetType : outerParam.getTargets()) { 1055 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1056 continue; 1057 } 1058 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1059 nextTargetType, 1060 targetParam, 1061 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1062 if (innerParam != null) { 1063 param = innerParam; 1064 break; 1065 } 1066 } 1067 } 1068 } 1069 } 1070 1071 int colonIdx = paramName.indexOf(':'); 1072 String referenceTargetType = null; 1073 if (colonIdx > -1) { 1074 referenceTargetType = paramName.substring(0, colonIdx); 1075 paramName = paramName.substring(colonIdx + 1); 1076 } 1077 1078 int dotIdx = paramName.indexOf('.'); 1079 String chainName = null; 1080 if (param == null && dotIdx > -1) { 1081 chainName = paramName.substring(dotIdx + 1); 1082 paramName = paramName.substring(0, dotIdx); 1083 if (chainName.contains(".")) { 1084 String msg = myContext 1085 .getLocalizer() 1086 .getMessageSanitized( 1087 BaseStorageDao.class, 1088 "invalidSortParameterTooManyChains", 1089 paramName + "." 
+ chainName); 1090 throw new InvalidRequestException(Msg.code(2286) + msg); 1091 } 1092 } 1093 1094 if (param == null) { 1095 param = mySearchParamRegistry.getActiveSearchParam( 1096 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1097 } 1098 1099 if (param == null) { 1100 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1101 } 1102 1103 // param will never be null here (the above line throws if it does) 1104 // this is just to prevent the warning 1105 assert param != null; 1106 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1107 throw new InvalidRequestException( 1108 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1109 } 1110 1111 switch (param.getParamType()) { 1112 case STRING: 1113 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1114 break; 1115 case DATE: 1116 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1117 break; 1118 case REFERENCE: 1119 theQueryStack.addSortOnResourceLink( 1120 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1121 break; 1122 case TOKEN: 1123 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1124 break; 1125 case NUMBER: 1126 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1127 break; 1128 case URI: 1129 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1130 break; 1131 case QUANTITY: 1132 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1133 break; 1134 case COMPOSITE: 1135 List<RuntimeSearchParam> compositeList = 1136 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); 1137 if (compositeList == null) { 1138 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1139 + " is not defined by the resource " + myResourceName); 1140 } 1141 if (compositeList.size() != 2) { 1142 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1143 + " must have 2 composite types declared in parameter annotation, found " 1144 + compositeList.size()); 1145 } 1146 RuntimeSearchParam left = compositeList.get(0); 1147 RuntimeSearchParam right = compositeList.get(1); 1148 1149 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1150 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1151 1152 break; 1153 case SPECIAL: 1154 if (LOCATION_POSITION.equals(param.getPath())) { 1155 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1156 break; 1157 } 1158 throw new InvalidRequestException( 1159 Msg.code(2306) + "This server does not support _sort specifications of type " 1160 + param.getParamType() + " - Can't serve _sort=" + paramName); 1161 1162 case HAS: 1163 default: 1164 throw new InvalidRequestException( 1165 Msg.code(1197) + "This server does not support _sort specifications of type " 1166 + param.getParamType() + " - Can't serve _sort=" + paramName); 1167 } 1168 } 1169 1170 // Recurse 1171 createSort(theQueryStack, theSort.getChain(), theParams); 1172 } 1173 1174 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1175 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1176 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1177 String msg = myContext 
1178 .getLocalizer() 1179 .getMessageSanitized( 1180 BaseStorageDao.class, 1181 "invalidSortParameter", 1182 theParamName, 1183 theResourceName, 1184 validSearchParameterNames); 1185 throw new InvalidRequestException(Msg.code(1194) + msg); 1186 } 1187 1188 private void createCompositeSort( 1189 QueryStack theQueryStack, 1190 RestSearchParameterTypeEnum theParamType, 1191 String theParamName, 1192 boolean theAscending) { 1193 1194 switch (theParamType) { 1195 case STRING: 1196 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1197 break; 1198 case DATE: 1199 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1200 break; 1201 case TOKEN: 1202 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1203 break; 1204 case QUANTITY: 1205 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1206 break; 1207 case NUMBER: 1208 case REFERENCE: 1209 case COMPOSITE: 1210 case URI: 1211 case HAS: 1212 case SPECIAL: 1213 default: 1214 throw new InvalidRequestException( 1215 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1216 + " on _sort=" + theParamName); 1217 } 1218 } 1219 1220 private void doLoadPids( 1221 Collection<JpaPid> thePids, 1222 Collection<JpaPid> theIncludedPids, 1223 List<IBaseResource> theResourceListToPopulate, 1224 boolean theForHistoryOperation, 1225 Map<Long, Integer> thePosition) { 1226 Map<JpaPid, Long> resourcePidToVersion = null; 1227 for (JpaPid next : thePids) { 1228 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1229 if (resourcePidToVersion == null) { 1230 resourcePidToVersion = new HashMap<>(); 1231 } 1232 resourcePidToVersion.put(next, next.getVersion()); 1233 } 1234 } 1235 1236 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1237 if (versionlessPids.size() < getMaximumPageSize()) { 1238 versionlessPids = normalizeIdListForInClause(versionlessPids); 1239 } 1240 1241 // Load the resource bodies 1242 List<ResourceHistoryTable> resourceSearchViewList = 1243 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable( 1244 JpaPidFk.fromPids(versionlessPids)); 1245 1246 /* 1247 * If we have specific versions to load, replace the history entries with the 1248 * correct ones 1249 * 1250 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1251 * version entity first, and by batching the fetches. But this is a fairly infrequently 1252 * used feature, and loading history entities by PK is a very efficient query so it's 1253 * not the end of the world 1254 */ 1255 if (resourcePidToVersion != null) { 1256 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1257 ResourceHistoryTable next = resourceSearchViewList.get(i); 1258 JpaPid resourceId = next.getPersistentId(); 1259 Long version = resourcePidToVersion.get(resourceId); 1260 resourceId.setVersion(version); 1261 if (version != null && !version.equals(next.getVersion())) { 1262 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1263 next.getResourceId().toFk(), version); 1264 resourceSearchViewList.set(i, replacement); 1265 } 1266 } 1267 } 1268 1269 // -- preload all tags with tag definition if any 1270 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1271 1272 for (ResourceHistoryTable next : resourceSearchViewList) { 1273 if (next.getDeleted() != null) { 1274 continue; 1275 } 1276 1277 Class<? 
extends IBaseResource> resourceType = 1278 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1279 1280 JpaPid resourceId = next.getPersistentId(); 1281 1282 if (resourcePidToVersion != null) { 1283 Long version = resourcePidToVersion.get(resourceId); 1284 resourceId.setVersion(version); 1285 } 1286 1287 IBaseResource resource; 1288 resource = myJpaStorageResourceParser.toResource( 1289 resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1290 if (resource == null) { 1291 ourLog.warn( 1292 "Unable to find resource {}/{}/_history/{} in database", 1293 next.getResourceType(), 1294 next.getIdDt().getIdPart(), 1295 next.getVersion()); 1296 continue; 1297 } 1298 1299 Integer index = thePosition.get(resourceId.getId()); 1300 if (index == null) { 1301 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1302 continue; 1303 } 1304 1305 if (theIncludedPids.contains(resourceId)) { 1306 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1307 } else { 1308 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1309 } 1310 1311 // ensure there's enough space; "<=" because of 0-indexing 1312 while (theResourceListToPopulate.size() <= index) { 1313 theResourceListToPopulate.add(null); 1314 } 1315 theResourceListToPopulate.set(index, resource); 1316 } 1317 } 1318 1319 private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) { 1320 return switch (myStorageSettings.getTagStorageMode()) { 1321 case VERSIONED -> getPidToTagMapVersioned(theHistoryTables); 1322 case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables); 1323 case INLINE -> Map.of(); 1324 }; 1325 } 1326 1327 @Nonnull 1328 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned( 1329 Collection<ResourceHistoryTable> theHistoryTables) { 1330 List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size()); 1331 1332 // -- find all resource has tags 1333 for (ResourceHistoryTable resource : theHistoryTables) { 1334 if (resource.isHasTags()) { 1335 idList.add(resource.getId()); 1336 } 1337 } 1338 1339 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1340 1341 // -- no tags 1342 if (idList.isEmpty()) { 1343 return tagMap; 1344 } 1345 1346 // -- get all tags for the idList 1347 Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList); 1348 1349 // -- build the map, key = resourceId, value = list of ResourceTag 1350 JpaPid resourceId; 1351 Collection<BaseTag> tagCol; 1352 for (ResourceHistoryTag tag : tagList) { 1353 1354 resourceId = tag.getResourcePid(); 1355 tagCol = tagMap.get(resourceId); 1356 if (tagCol == null) { 1357 tagCol = new ArrayList<>(); 1358 tagCol.add(tag); 1359 tagMap.put(resourceId, tagCol); 1360 } else { 1361 tagCol.add(tag); 1362 } 1363 } 1364 1365 return tagMap; 1366 } 1367 1368 @Nonnull 1369 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned( 1370 Collection<ResourceHistoryTable> theHistoryTables) { 1371 List<JpaPid> idList = new ArrayList<>(theHistoryTables.size()); 1372 1373 // -- find all resource has tags 1374 for (ResourceHistoryTable resource : theHistoryTables) { 1375 if (resource.isHasTags()) { 1376 idList.add(resource.getResourceId()); 1377 } 1378 } 1379 1380 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1381 1382 // -- no tags 1383 if (idList.isEmpty()) { 1384 return tagMap; 1385 } 1386 1387 // -- get all tags for the idList 
1388 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList); 1389 1390 // -- build the map, key = resourceId, value = list of ResourceTag 1391 JpaPid resourceId; 1392 Collection<BaseTag> tagCol; 1393 for (ResourceTag tag : tagList) { 1394 1395 resourceId = tag.getResourceId(); 1396 tagCol = tagMap.get(resourceId); 1397 if (tagCol == null) { 1398 tagCol = new ArrayList<>(); 1399 tagCol.add(tag); 1400 tagMap.put(resourceId, tagCol); 1401 } else { 1402 tagCol.add(tag); 1403 } 1404 } 1405 1406 return tagMap; 1407 } 1408 1409 @Override 1410 public void loadResourcesByPid( 1411 Collection<JpaPid> thePids, 1412 Collection<JpaPid> theIncludedPids, 1413 List<IBaseResource> theResourceListToPopulate, 1414 boolean theForHistoryOperation, 1415 RequestDetails theDetails) { 1416 if (thePids.isEmpty()) { 1417 ourLog.debug("The include pids are empty"); 1418 } 1419 1420 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1421 // when running asserts 1422 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1423 1424 Map<Long, Integer> position = new HashMap<>(); 1425 int index = 0; 1426 for (JpaPid next : thePids) { 1427 position.put(next.getId(), index++); 1428 } 1429 1430 // Can we fast track this loading by checking elastic search? 1431 boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids); 1432 if (isUsingElasticSearch) { 1433 try { 1434 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1435 return; 1436 1437 } catch (ResourceNotFoundInIndexException theE) { 1438 // some resources were not found in index, so we will inform this and resort to JPA search 1439 ourLog.warn( 1440 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1441 } 1442 } 1443 1444 // We only chunk because some jdbc drivers can't handle long param lists. 1445 QueryChunker.chunk(thePids, t -> { 1446 doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position); 1447 }); 1448 } 1449 1450 /** 1451 * Check if we can load the resources from Hibernate Search instead of the database. 1452 * We assume this is faster. 1453 * <p> 1454 * Hibernate Search only stores the current version, and only if enabled. 1455 * 1456 * @param thePids the pids to check for versioned references 1457 * @return can we fetch from Hibernate Search? 1458 */ 1459 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1460 // is storage enabled? 1461 return myStorageSettings.isStoreResourceInHSearchIndex() 1462 && myStorageSettings.isHibernateSearchIndexSearchParams() 1463 && 1464 // we don't support history 1465 thePids.stream().noneMatch(p -> p.getVersion() != null) 1466 && 1467 // skip the complexity for metadata in dstu2 1468 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1469 } 1470 1471 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1472 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1473 // only impl 1474 // to handle lastN? 
1475 if (myStorageSettings.isHibernateSearchIndexSearchParams() 1476 && myStorageSettings.isStoreResourceInHSearchIndex()) { 1477 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1478 1479 return myFulltextSearchSvc.getResources(pidList); 1480 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1481 // legacy LastN implementation 1482 return myIElasticsearchSvc.getObservationResources(thePids); 1483 } else { 1484 return Collections.emptyList(); 1485 } 1486 } 1487 1488 /** 1489 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1490 * so it can't be Collections.emptySet() or some such thing. 1491 * The JpaPid returned will have resource type populated. 1492 */ 1493 @Override 1494 public Set<JpaPid> loadIncludes( 1495 FhirContext theContext, 1496 EntityManager theEntityManager, 1497 Collection<JpaPid> theMatches, 1498 Collection<Include> theIncludes, 1499 boolean theReverseMode, 1500 DateRangeParam theLastUpdated, 1501 String theSearchIdOrDescription, 1502 RequestDetails theRequest, 1503 Integer theMaxCount) { 1504 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1505 parameters.setFhirContext(theContext); 1506 parameters.setEntityManager(theEntityManager); 1507 parameters.setMatches(theMatches); 1508 parameters.setIncludeFilters(theIncludes); 1509 parameters.setReverseMode(theReverseMode); 1510 parameters.setLastUpdated(theLastUpdated); 1511 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1512 parameters.setRequestDetails(theRequest); 1513 parameters.setMaxCount(theMaxCount); 1514 return loadIncludes(parameters); 1515 } 1516 1517 @Override 1518 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1519 Collection<JpaPid> matches = theParameters.getMatches(); 1520 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1521 boolean reverseMode = theParameters.isReverseMode(); 1522 EntityManager entityManager = theParameters.getEntityManager(); 1523 Integer maxCount = theParameters.getMaxCount(); 1524 FhirContext fhirContext = theParameters.getFhirContext(); 1525 RequestDetails request = theParameters.getRequestDetails(); 1526 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1527 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1528 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1529 IInterceptorBroadcaster compositeBroadcaster = 1530 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1531 1532 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1533 CurrentThreadCaptureQueriesListener.startCapturing(); 1534 } 1535 if (matches.isEmpty()) { 1536 return new HashSet<>(); 1537 } 1538 if (currentIncludes == null || currentIncludes.isEmpty()) { 1539 return new HashSet<>(); 1540 } 1541 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1542 String searchPartitionIdFieldName = 1543 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1544 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1545 String findPartitionIdFieldName = 1546 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1547 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1548 String findVersionFieldName = null; 1549 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1550 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1551 } 1552 1553 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1554 HashSet<JpaPid> allAdded = new HashSet<>(); 1555 HashSet<JpaPid> original = new HashSet<>(matches); 1556 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1557 1558 int roundCounts = 0; 1559 StopWatch w = new StopWatch(); 1560 1561 boolean addedSomeThisRound; 1562 do { 1563 roundCounts++; 1564 1565 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1566 1567 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1568 Include nextInclude = iter.next(); 1569 if (!nextInclude.isRecurse()) { 1570 iter.remove(); 1571 } 1572 1573 // Account for _include=* 1574 boolean matchAll = "*".equals(nextInclude.getValue()); 1575 1576 // Account for _include=[resourceType]:* 1577 String wantResourceType = null; 1578 if (!matchAll) { 1579 if ("*".equals(nextInclude.getParamName())) { 1580 wantResourceType = nextInclude.getParamType(); 1581 matchAll = true; 1582 } 1583 } 1584 1585 if (matchAll) { 1586 loadIncludesMatchAll( 1587 findPidFieldName, 1588 findPartitionIdFieldName, 1589 findResourceTypeFieldName, 1590 findVersionFieldName, 1591 searchPidFieldName, 1592 searchPartitionIdFieldName, 1593 wantResourceType, 1594 reverseMode, 1595 hasDesiredResourceTypes, 1596 nextRoundMatches, 1597 entityManager, 1598 maxCount, 1599 desiredResourceTypes, 1600 pidsToInclude, 1601 request); 1602 } else { 1603 loadIncludesMatchSpecific( 1604 nextInclude, 1605 fhirContext, 1606 findPidFieldName, 1607 findPartitionIdFieldName, 1608 findVersionFieldName, 1609 searchPidFieldName, 1610 reverseMode, 1611 nextRoundMatches, 1612 entityManager, 1613 maxCount, 1614 pidsToInclude, 1615 request); 1616 } 1617 } 1618 1619 nextRoundMatches.clear(); 1620 for (JpaPid next : pidsToInclude) { 1621 if (!original.contains(next) && !allAdded.contains(next)) { 1622 nextRoundMatches.add(next); 1623 } else { 1624 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1625 } 1626 } 1627 1628 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1629 1630 if (maxCount != null && allAdded.size() >= maxCount) { 1631 break; 1632 } 1633 1634 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1635 1636 allAdded.removeAll(original); 1637 1638 ourLog.info( 1639 "Loaded {} {} in {} rounds and {} ms for search {}", 1640 allAdded.size(), 1641 reverseMode ? 
"_revincludes" : "_includes", 1642 roundCounts, 1643 w.getMillisAndRestart(), 1644 searchIdOrDescription); 1645 1646 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1647 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1648 } 1649 1650 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1651 // This can be used to remove results from the search result details before 1652 // the user has a chance to know that they were in the results 1653 if (!allAdded.isEmpty()) { 1654 1655 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1656 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1657 JpaPreResourceAccessDetails accessDetails = 1658 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1659 HookParams params = new HookParams() 1660 .add(IPreResourceAccessDetails.class, accessDetails) 1661 .add(RequestDetails.class, request) 1662 .addIfMatchesType(ServletRequestDetails.class, request); 1663 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1664 1665 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1666 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1667 JpaPid value = includedPidList.remove(i); 1668 if (value != null) { 1669 allAdded.remove(value); 1670 } 1671 } 1672 } 1673 } 1674 } 1675 1676 return allAdded; 1677 } 1678 1679 private void loadIncludesMatchSpecific( 1680 Include nextInclude, 1681 FhirContext fhirContext, 1682 String findPidFieldName, 1683 String findPartitionFieldName, 1684 String findVersionFieldName, 1685 String searchPidFieldName, 1686 boolean reverseMode, 1687 List<JpaPid> nextRoundMatches, 1688 EntityManager entityManager, 1689 Integer maxCount, 1690 HashSet<JpaPid> pidsToInclude, 1691 RequestDetails theRequest) { 1692 List<String> paths; 1693 1694 // Start replace 1695 RuntimeSearchParam param; 1696 String resType = nextInclude.getParamType(); 1697 if (isBlank(resType)) { 1698 return; 1699 } 1700 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1701 if (def == null) { 1702 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1703 return; 1704 } 1705 1706 String paramName = nextInclude.getParamName(); 1707 if (isNotBlank(paramName)) { 1708 param = mySearchParamRegistry.getActiveSearchParam( 1709 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1710 } else { 1711 param = null; 1712 } 1713 if (param == null) { 1714 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1715 return; 1716 } 1717 1718 paths = param.getPathsSplitForResourceType(resType); 1719 // end replace 1720 1721 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1722 1723 for (String nextPath : paths) { 1724 String findPidFieldSqlColumn = 1725 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1726 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1727 if (findVersionFieldName != null) { 1728 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1729 } 1730 if (myPartitionSettings.isDatabasePartitionMode()) { 1731 fieldsToLoad += ", r."; 1732 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1733 ? 
"partition_id" 1734 : "target_res_partition_id"; 1735 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1736 } 1737 1738 // Query for includes lookup has 2 cases 1739 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1740 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1741 // url in target_resource_url 1742 1743 // Case 1: 1744 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1745 1746 String searchPidFieldSqlColumn = 1747 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1748 StringBuilder localReferenceQuery = new StringBuilder(); 1749 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1750 localReferenceQuery.append(" FROM hfj_res_link r "); 1751 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1752 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1753 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1754 } 1755 localReferenceQuery 1756 .append(" AND r.") 1757 .append(searchPidFieldSqlColumn) 1758 .append(" IN (:target_pids) "); 1759 if (myPartitionSettings.isDatabasePartitionMode()) { 1760 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1761 ? "target_res_partition_id" 1762 : "partition_id"; 1763 localReferenceQuery 1764 .append("AND r.") 1765 .append(partitionFieldToSearch) 1766 .append(" = :search_partition_id "); 1767 } 1768 localReferenceQueryParams.put("src_path", nextPath); 1769 // we loop over target_pids later. 1770 if (targetResourceTypes != null) { 1771 if (targetResourceTypes.size() == 1) { 1772 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1773 localReferenceQueryParams.put( 1774 "target_resource_type", 1775 targetResourceTypes.iterator().next()); 1776 } else { 1777 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1778 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1779 } 1780 } 1781 1782 // Case 2: 1783 Pair<String, Map<String, Object>> canonicalQuery = 1784 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1785 1786 String sql = localReferenceQuery.toString(); 1787 if (canonicalQuery != null) { 1788 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1789 } 1790 1791 Map<String, Object> limitParams = new HashMap<>(); 1792 if (maxCount != null) { 1793 LinkedList<Object> bindVariables = new LinkedList<>(); 1794 sql = SearchQueryBuilder.applyLimitToSql( 1795 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1796 1797 // The dialect SQL limiter uses positional params, but we're using 1798 // named params here, so we need to replace the positional params 1799 // with equivalent named ones 1800 StringBuilder sb = new StringBuilder(); 1801 for (int i = 0; i < sql.length(); i++) { 1802 char nextChar = sql.charAt(i); 1803 if (nextChar == '?') { 1804 String nextName = "limit" + i; 1805 sb.append(':').append(nextName); 1806 limitParams.put(nextName, bindVariables.removeFirst()); 1807 } else { 1808 sb.append(nextChar); 1809 } 1810 } 1811 sql = sb.toString(); 1812 } 1813 1814 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1815 for (Collection<JpaPid> nextPartition : partitions) { 1816 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1817 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1818 if (myPartitionSettings.isDatabasePartitionMode()) { 1819 q.setParameter( 1820 "search_partition_id", 1821 nextPartition.iterator().next().getPartitionId()); 1822 } 1823 localReferenceQueryParams.forEach(q::setParameter); 1824 if (canonicalQuery != null) { 1825 canonicalQuery.getRight().forEach(q::setParameter); 1826 } 1827 limitParams.forEach(q::setParameter); 1828 1829 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1830 Tuple result; 1831 while (iter.hasNext()) { 1832 result = iter.next(); 1833 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1834 Long resourceVersion = null; 1835 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1836 resourceVersion = 1837 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1838 } 1839 Integer partitionId = null; 1840 if (myPartitionSettings.isDatabasePartitionMode()) { 1841 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1842 } 1843 1844 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1845 pid.setPartitionId(partitionId); 1846 pidsToInclude.add(pid); 1847 } 1848 } 1849 // myEntityManager.clear(); 1850 } 1851 } 1852 } 1853 1854 private void loadIncludesMatchAll( 1855 String findPidFieldName, 1856 String findPartitionFieldName, 1857 String findResourceTypeFieldName, 1858 String findVersionFieldName, 1859 String searchPidFieldName, 1860 String searchPartitionFieldName, 1861 String wantResourceType, 1862 boolean reverseMode, 1863 boolean hasDesiredResourceTypes, 1864 List<JpaPid> nextRoundMatches, 1865 EntityManager entityManager, 1866 Integer maxCount, 1867 List<String> desiredResourceTypes, 1868 HashSet<JpaPid> pidsToInclude, 1869 RequestDetails request) { 1870 1871 record IncludesRecord( 1872 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1873 1874 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1875 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1876 Root<ResourceLink> root = query.from(ResourceLink.class); 1877 1878 List<Selection<?>> selectionList = new ArrayList<>(); 1879 selectionList.add(root.get(findPidFieldName)); 1880 selectionList.add(root.get(findResourceTypeFieldName)); 1881 selectionList.add(root.get("myTargetResourceUrl")); 1882 if (findVersionFieldName != null) { 1883 selectionList.add(root.get(findVersionFieldName)); 1884 } else { 1885 selectionList.add(cb.nullLiteral(Long.class)); 1886 } 1887 if (myPartitionSettings.isDatabasePartitionMode()) { 1888 selectionList.add(root.get(findPartitionFieldName)); 1889 } else { 1890 selectionList.add(cb.nullLiteral(Integer.class)); 1891 } 1892 query.multiselect(selectionList); 1893 1894 List<Predicate> predicates = new ArrayList<>(); 1895 1896 if (myPartitionSettings.isDatabasePartitionMode()) { 1897 predicates.add( 1898 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 1899 } 1900 1901 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 1902 1903 /* 1904 * We need to set the resource type in 2 cases only: 1905 * 1) we are in $everything mode 1906 * (where we only want to fetch specific resource types, regardless of what is 1907 * available to fetch) 1908 * 2) we are doing revincludes 1909 * 1910 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 1911 * should always be checking the source resource type on the resource link. We don't 1912 * actually index that column though by default, so in order to try and be efficient 1913 * we don't actually include it for includes (but we do for revincludes). This is 1914 * because for an include, it doesn't really make sense to include a different 1915 * resource type than the one you are searching on. 1916 */ 1917 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1918 // because mySourceResourceType is not part of the HFJ_RES_LINK 1919 // index, this might not be the most optimal performance. 1920 // but it is for an $everything operation (and maybe we should update the index) 1921 predicates.add( 1922 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 1923 } else { 1924 wantResourceType = null; 1925 } 1926 1927 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1928 // resources 1929 // (e.g. via Provenance, List, or Group) when in an $everything operation 1930 if (myParams != null 1931 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1932 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 1933 predicates.add(cb.not(root.get("mySourceResourceType") 1934 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 1935 } 1936 1937 if (hasDesiredResourceTypes) { 1938 predicates.add( 1939 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 1940 } 1941 1942 query.where(cb.and(predicates.toArray(new Predicate[0]))); 1943 1944 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1945 for (Collection<JpaPid> nextPartition : partitions) { 1946 1947 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 1948 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1949 if (myPartitionSettings.isDatabasePartitionMode()) { 1950 q.setParameter( 1951 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1952 } 1953 if (wantResourceType != null) { 1954 q.setParameter("want_resource_type", wantResourceType); 1955 } 1956 if (maxCount != null) { 1957 q.setMaxResults(maxCount); 1958 } 1959 if (hasDesiredResourceTypes) { 1960 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1961 } 1962 1963 Set<String> canonicalUrls = null; 1964 1965 try (ScrollableResultsIterator<IncludesRecord> iter = 1966 new ScrollableResultsIterator<>(toScrollableResults(q))) { 1967 IncludesRecord nextRow; 1968 while (iter.hasNext()) { 1969 nextRow = iter.next(); 1970 if (nextRow == null) { 1971 // This can happen if there are outgoing references which are canonical or point to 1972 // other servers 1973 continue; 1974 } 1975 1976 Long version = nextRow.version; 1977 Long resourceId = nextRow.resourceId; 1978 String resourceType = nextRow.resourceType; 1979 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 1980 Integer partitionId = nextRow.partitionId; 1981 1982 if (resourceId != null) { 1983 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1984 pid.setPartitionId(partitionId); 1985 pidsToInclude.add(pid); 1986 } else if (resourceCanonicalUrl != null) { 1987 if (canonicalUrls == null) { 1988 canonicalUrls = new HashSet<>(); 1989 } 1990 canonicalUrls.add(resourceCanonicalUrl); 1991 } 
				}
			}

			if (canonicalUrls != null) {
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		if (canonicalUrlTargets.isEmpty()) {
			return;
		}

		String message =
				"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
		firePerformanceWarning(theRequestDetails, message);

		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) next[0];
						pid = (Long) next[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance trace hook, passing the raw SQL queries captured on the
	 * current thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = nextInclude.getParamTargetType();
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
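			// (a null return is interpreted by the callers as "no restriction": the
			// include may point at any target resource type)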
2084 targetResourceTypes = null; 2085 } 2086 return targetResourceTypes; 2087 } 2088 2089 @Nullable 2090 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2091 String theVersionFieldName, 2092 Set<String> theTargetResourceTypes, 2093 boolean theReverse, 2094 RequestDetails theRequest, 2095 RuntimeSearchParam theParam) { 2096 2097 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2098 2099 // If we know for sure that none of the paths involved in this SearchParameter could 2100 // be indexing a canonical 2101 if (Arrays.stream(searchParameterPaths) 2102 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2103 myContext, myResourceName, t, theReverse))) { 2104 return null; 2105 } 2106 2107 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2108 if (theVersionFieldName != null) { 2109 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2110 fieldsToLoadFromSpidxUriTable += ", NULL"; 2111 } 2112 2113 if (myPartitionSettings.isDatabasePartitionMode()) { 2114 if (theReverse) { 2115 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2116 } else { 2117 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2118 } 2119 } 2120 2121 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2122 // But sp_name isn't indexed, so we use hash_identity instead. 2123 CanonicalUrlTargets canonicalUrlTargets = 2124 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2125 if (canonicalUrlTargets.isEmpty()) { 2126 return null; 2127 } 2128 2129 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2130 StringBuilder canonicalUrlQuery = new StringBuilder(); 2131 canonicalUrlQuery 2132 .append("SELECT ") 2133 .append(fieldsToLoadFromSpidxUriTable) 2134 .append(' '); 2135 canonicalUrlQuery.append("FROM hfj_res_link r "); 2136 2137 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2138 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2139 if (myPartitionSettings.isDatabasePartitionMode()) { 2140 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2141 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2142 } 2143 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2144 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2145 canonicalUriQueryParams.put( 2146 "uri_identity_hash", 2147 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2148 } else { 2149 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2150 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2151 } 2152 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2153 canonicalUrlQuery.append(")"); 2154 2155 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2156 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2157 canonicalUrlQuery.append(" AND"); 2158 if (myPartitionSettings.isDatabasePartitionMode()) { 2159 if (theReverse) { 2160 canonicalUrlQuery.append(" rUri.partition_id"); 2161 } else { 2162 canonicalUrlQuery.append(" r.partition_id"); 2163 } 2164 canonicalUrlQuery.append(" = :search_partition_id"); 2165 canonicalUrlQuery.append(" AND"); 2166 } 2167 if (theReverse) { 2168 
canonicalUrlQuery.append(" rUri.res_id"); 2169 } else { 2170 canonicalUrlQuery.append(" r.src_resource_id"); 2171 } 2172 canonicalUrlQuery.append(" IN (:target_pids)"); 2173 2174 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2175 } 2176 2177 @Nonnull 2178 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2179 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2180 Set<String> targetResourceTypes = theTargetResourceTypes; 2181 if (targetResourceTypes == null) { 2182 /* 2183 * If we don't have a list of valid target types, we need to figure out a list of all 2184 * possible target types in order to perform the search of the URI index table. This is 2185 * because the hash_identity column encodes the resource type, so we'll need a hash 2186 * value for each possible target type. 2187 */ 2188 targetResourceTypes = new HashSet<>(); 2189 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2190 if (theReverse) { 2191 // For reverse includes, it is really hard to figure out what types 2192 // are actually potentially pointing to the type we're searching for 2193 // in this context, so let's just assume it could be anything. 2194 targetResourceTypes = possibleTypes; 2195 } else { 2196 List<RuntimeSearchParam> params = mySearchParamRegistry 2197 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2198 .values() 2199 .stream() 2200 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2201 .toList(); 2202 for (var next : params) { 2203 2204 String paths = next.getPath(); 2205 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2206 2207 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2208 myContext, myResourceName, path, theReverse)) { 2209 continue; 2210 } 2211 2212 if (!next.getTargets().isEmpty()) { 2213 // For each reference parameter on the resource type we're searching for, 2214 // add all the potential target types to the list of possible target 2215 // resource types we can look up. 
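					// For example, a reference parameter whose declared targets are
					// (Patient, Group, Device) contributes each of those types here (so long
					// as the type is enabled on this server), and each type later yields one
					// hash_identity value for its "url" search parameter.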
2216 for (var nextTarget : next.getTargets()) { 2217 if (possibleTypes.contains(nextTarget)) { 2218 targetResourceTypes.add(nextTarget); 2219 } 2220 } 2221 } else { 2222 // If we have any references that don't define any target types, then 2223 // we need to assume that all enabled resource types are possible target 2224 // types 2225 targetResourceTypes.addAll(possibleTypes); 2226 break; 2227 } 2228 } 2229 } 2230 } 2231 } 2232 2233 if (targetResourceTypes.isEmpty()) { 2234 return new CanonicalUrlTargets(Set.of(), Set.of()); 2235 } 2236 2237 Set<Long> hashIdentityValues = new HashSet<>(); 2238 Set<Integer> partitionIds = new HashSet<>(); 2239 for (String type : targetResourceTypes) { 2240 2241 RequestPartitionId readPartition; 2242 if (myPartitionSettings.isPartitioningEnabled()) { 2243 readPartition = 2244 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2245 } else { 2246 readPartition = RequestPartitionId.defaultPartition(); 2247 } 2248 if (readPartition.hasPartitionIds()) { 2249 partitionIds.addAll(readPartition.getPartitionIds()); 2250 } 2251 2252 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2253 myPartitionSettings, readPartition, type, "url"); 2254 hashIdentityValues.add(hashIdentity); 2255 } 2256 2257 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2258 } 2259 2260 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2261 public boolean isEmpty() { 2262 return hashIdentityValues.isEmpty(); 2263 } 2264 } 2265 2266 /** 2267 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2268 * those pids where: 2269 * <ul> 2270 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2271 * <li>Each list only contains JpaPids with the same partition ID</li> 2272 * </ul> 2273 */ 2274 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2275 2276 if (theNextRoundMatches.size() <= theMaxLoad) { 2277 boolean allSamePartition = true; 2278 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2279 if (!Objects.equals( 2280 theNextRoundMatches.get(i - 1).getPartitionId(), 2281 theNextRoundMatches.get(i).getPartitionId())) { 2282 allSamePartition = false; 2283 break; 2284 } 2285 } 2286 if (allSamePartition) { 2287 return Collections.singletonList(theNextRoundMatches); 2288 } 2289 } 2290 2291 // Break into partitioned sublists 2292 ListMultimap<String, JpaPid> lists = 2293 MultimapBuilder.hashKeys().arrayListValues().build(); 2294 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2295 String partitionId = nextRoundMatch.getPartitionId() != null 2296 ? 
nextRoundMatch.getPartitionId().toString()
					: "";
			lists.put(partitionId, nextRoundMatch);
		}

		List<Collection<JpaPid>> retVal = new ArrayList<>();
		for (String key : lists.keySet()) {
			List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
			retVal.addAll(nextPartition);
		}

		// In unit test mode, we sort the results just for unit test predictability
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			retVal = retVal.stream()
					.map(t -> t.stream().sorted().collect(Collectors.toList()))
					.collect(Collectors.toList());
		}

		return retVal;
	}

	private void attemptComboUniqueSpProcessing(
			QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
		RuntimeSearchParam comboParam = null;
		List<String> comboParamNames = null;
		List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
				myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		if (!exactMatchParams.isEmpty()) {
			comboParam = exactMatchParams.get(0);
			comboParamNames = new ArrayList<>(theParams.keySet());
		}

		if (comboParam == null) {
			List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			for (RuntimeSearchParam nextCandidate : candidateComboParams) {
				List<String> nextCandidateParamNames =
						JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
								.map(RuntimeSearchParam::getName)
								.collect(Collectors.toList());
				if (theParams.keySet().containsAll(nextCandidateParamNames)) {
					comboParam = nextCandidate;
					comboParamNames = nextCandidateParamNames;
					break;
				}
			}
		}

		if (comboParam != null) {
			Collections.sort(comboParamNames);

			// Since we're going to remove elements from the parameter lists below
			theParams.values().forEach(this::ensureSubListsAreWritable);

			/*
			 * Apply the search against the combo param index in a loop:
			 *
			 * 1. First we check whether the parameter values in the parameter map
			 * are actually usable for searching against the combo param index,
			 * e.g. no search modifiers, date comparators, etc., since these mean
			 * you can't use the combo index.
			 *
			 * 2. Apply and create the join SQL. We remove parameter values from
			 * the map as we apply them, so any parameter values remaining in the
			 * map after each loop haven't yet been factored into the SQL.
			 *
			 * The loop allows us to create multiple combo index joins if there
			 * are multiple AND expressions for the related parameters.
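			 *
			 * Illustrative example (not from the original comment): given a combo search
			 * parameter on (birthdate, family) and a query like
			 *   Patient?birthdate=2011-01-01&family=SMITH,JONES
			 * a single pass consumes one AND-level value per parameter and produces the
			 * combo index strings
			 *   Patient?birthdate=2011-01-01&family=SMITH
			 *   Patient?birthdate=2011-01-01&family=JONES
			 * (string values may additionally be normalized for NON_UNIQUE combo params);
			 * a further AND-level value, e.g. another &family=..., triggers another pass
			 * of the loop.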
2364 */ 2365 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2366 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2367 } 2368 } 2369 } 2370 2371 private void applyComboSearchParam( 2372 QueryStack theQueryStack, 2373 @Nonnull SearchParameterMap theParams, 2374 RequestDetails theRequest, 2375 List<String> theComboParamNames, 2376 RuntimeSearchParam theComboParam) { 2377 2378 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2379 for (String nextParamName : theComboParamNames) { 2380 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2381 inputs.add(nextValues); 2382 } 2383 2384 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2385 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2386 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2387 2388 StringBuilder searchStringBuilder = new StringBuilder(); 2389 searchStringBuilder.append(myResourceName); 2390 searchStringBuilder.append("?"); 2391 2392 boolean first = true; 2393 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2394 2395 String nextParamName = theComboParamNames.get(paramIndex); 2396 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2397 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2398 // As a result, we strip the prefix if present. 2399 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2400 2401 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2402 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2403 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2404 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2405 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2406 } 2407 } 2408 2409 if (first) { 2410 first = false; 2411 } else { 2412 searchStringBuilder.append('&'); 2413 } 2414 2415 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2416 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2417 2418 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2419 } 2420 2421 String indexString = searchStringBuilder.toString(); 2422 ourLog.debug( 2423 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2424 2425 indexStrings.add(indexString); 2426 } 2427 2428 // Just to make sure we're stable for tests 2429 indexStrings.sort(Comparator.naturalOrder()); 2430 2431 // Interceptor broadcast: JPA_PERFTRACE_INFO 2432 IInterceptorBroadcaster compositeBroadcaster = 2433 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2434 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2435 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2436 StorageProcessingMessage msg = new StorageProcessingMessage() 2437 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2438 + indexStringForLog); 2439 HookParams params = new HookParams() 2440 .add(RequestDetails.class, theRequest) 2441 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2442 .add(StorageProcessingMessage.class, msg); 2443 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2444 } 2445 2446 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2447 case UNIQUE: 2448 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2449 break; 2450 case NON_UNIQUE: 2451 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2452 break; 2453 } 2454 2455 // Remove any empty parameters remaining after this 2456 theParams.clean(); 2457 } 2458 2459 /** 2460 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2461 * searching against a combo param with the given parameter names. This might be {@literal false} if 2462 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2463 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2464 */ 2465 private boolean validateParamValuesAreValidForComboParam( 2466 RequestDetails theRequest, 2467 @Nonnull SearchParameterMap theParams, 2468 List<String> theComboParamNames, 2469 RuntimeSearchParam theComboParam) { 2470 boolean paramValuesAreValidForCombo = true; 2471 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2472 2473 for (String nextParamName : theComboParamNames) { 2474 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2475 2476 if (nextValues == null || nextValues.isEmpty()) { 2477 paramValuesAreValidForCombo = false; 2478 break; 2479 } 2480 2481 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2482 paramOrValues.add(nextAndValue); 2483 2484 for (IQueryParameterType nextOrValue : nextAndValue) { 2485 if (nextOrValue instanceof DateParam dateParam) { 2486 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2487 String message = "Search with params " + theComboParamNames 2488 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2489 + nextParamName + "'"; 2490 firePerformanceInfo(theRequest, message); 2491 paramValuesAreValidForCombo = false; 2492 break; 2493 } 2494 } 2495 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2496 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2497 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2498 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
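						// (any other prefix, e.g. birthdate=ge2025-01-01, changes the match
						// semantics and cannot be answered from the pre-computed combo index string)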
2499 if (prefix != null && prefix != EQUAL) { 2500 String message = "Search with params " + theComboParamNames 2501 + " is not a candidate for combo searching - Parameter '" + nextParamName 2502 + "' has prefix: '" 2503 + paramWithPrefix.getPrefix().getValue() + "'"; 2504 firePerformanceInfo(theRequest, message); 2505 paramValuesAreValidForCombo = false; 2506 break; 2507 } 2508 } 2509 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2510 String message = "Search with params " + theComboParamNames 2511 + " is not a candidate for combo searching - Parameter '" + nextParamName 2512 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2513 firePerformanceInfo(theRequest, message); 2514 paramValuesAreValidForCombo = false; 2515 break; 2516 } 2517 } 2518 2519 // Reference params are only eligible for using a composite index if they 2520 // are qualified 2521 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2522 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2523 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2524 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2525 if (isBlank(param.getResourceType())) { 2526 ourLog.debug( 2527 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2528 paramValuesAreValidForCombo = false; 2529 break; 2530 } 2531 } 2532 2533 // Date params are not eligible for using composite unique index 2534 // as index could contain date with different precision (e.g. DAY, SECOND) 2535 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2536 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2537 ourLog.debug( 2538 "Search with params {} is not a candidate for combo searching - " 2539 + "Unique combo search parameter '{}' has DATE type", 2540 theComboParamNames, 2541 nextParamName); 2542 paramValuesAreValidForCombo = false; 2543 break; 2544 } 2545 } 2546 2547 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2548 ourLog.debug( 2549 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2550 paramValuesAreValidForCombo = false; 2551 } 2552 2553 return paramValuesAreValidForCombo; 2554 } 2555 2556 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2557 for (int i = 0; i < theListOfLists.size(); i++) { 2558 List<T> oldSubList = theListOfLists.get(i); 2559 if (!(oldSubList instanceof ArrayList)) { 2560 List<T> newSubList = new ArrayList<>(oldSubList); 2561 theListOfLists.set(i, newSubList); 2562 } 2563 } 2564 } 2565 2566 @Override 2567 public void setFetchSize(int theFetchSize) { 2568 myFetchSize = theFetchSize; 2569 } 2570 2571 public SearchParameterMap getParams() { 2572 return myParams; 2573 } 2574 2575 public CriteriaBuilder getBuilder() { 2576 return myCriteriaBuilder; 2577 } 2578 2579 public Class<? 
extends IBaseResource> getResourceType() { 2580 return myResourceType; 2581 } 2582 2583 public String getResourceName() { 2584 return myResourceName; 2585 } 2586 2587 /** 2588 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2589 */ 2590 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2591 2592 private final RequestDetails myRequest; 2593 private final Set<JpaPid> myCurrentPids; 2594 private Iterator<JpaPid> myCurrentIterator; 2595 private JpaPid myNext; 2596 2597 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2598 myCurrentPids = new HashSet<>(thePidSet); 2599 myCurrentIterator = null; 2600 myRequest = theRequest; 2601 } 2602 2603 private void fetchNext() { 2604 while (myNext == null) { 2605 2606 if (myCurrentIterator == null) { 2607 Set<Include> includes = new HashSet<>(); 2608 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2609 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2610 for (IQueryParameterType type : typeList) { 2611 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2612 for (String resourceType : queryString.split(",")) { 2613 String rt = resourceType.trim(); 2614 if (isNotBlank(rt)) { 2615 includes.add(new Include(rt + ":*", true)); 2616 } 2617 } 2618 } 2619 } 2620 } 2621 if (includes.isEmpty()) { 2622 includes.add(new Include("*", true)); 2623 } 2624 Set<JpaPid> newPids = loadIncludes( 2625 myContext, 2626 myEntityManager, 2627 myCurrentPids, 2628 includes, 2629 false, 2630 getParams().getLastUpdated(), 2631 mySearchUuid, 2632 myRequest, 2633 null); 2634 myCurrentIterator = newPids.iterator(); 2635 } 2636 2637 if (myCurrentIterator.hasNext()) { 2638 myNext = myCurrentIterator.next(); 2639 } else { 2640 myNext = NO_MORE; 2641 } 2642 } 2643 } 2644 2645 @Override 2646 public boolean hasNext() { 2647 fetchNext(); 2648 return !NO_MORE.equals(myNext); 2649 } 2650 2651 @Override 2652 public JpaPid next() { 2653 fetchNext(); 2654 JpaPid retVal = myNext; 2655 myNext = null; 2656 return retVal; 2657 } 2658 } 2659 /** 2660 * Basic Query iterator, used to fetch the results of a query. 2661 */ 2662 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2663 2664 private final SearchRuntimeDetails mySearchRuntimeDetails; 2665 2666 private final RequestDetails myRequest; 2667 private final boolean myHaveRawSqlHooks; 2668 private final boolean myHavePerfTraceFoundIdHook; 2669 private final SortSpec mySort; 2670 private final Integer myOffset; 2671 private final IInterceptorBroadcaster myCompositeBroadcaster; 2672 private boolean myFirst = true; 2673 private IncludesIterator myIncludesIterator; 2674 /** 2675 * The next JpaPid value of the next result in this query. 2676 * Will not be null if fetched using getNext() 2677 */ 2678 private JpaPid myNext; 2679 /** 2680 * The current query result iterator running sql and supplying PIDs 2681 * @see #myQueryList 2682 */ 2683 private ISearchQueryExecutor myResultsIterator; 2684 2685 private boolean myFetchIncludesForEverythingOperation; 2686 2687 /** 2688 * The count of resources skipped because they were seen in earlier results 2689 */ 2690 private int mySkipCount = 0; 2691 /** 2692 * The count of resources that are new in this search 2693 * (ie, not cached in previous searches) 2694 */ 2695 private int myNonSkipCount = 0; 2696 /** 2697 * The list of queries to use to find all results. 
2698 * Normal JPA queries will normally have a single entry. 2699 * Queries that involve Hibernate Search/Elasticsearch may have 2700 * multiple queries because of chunking. 2701 * The $everything operation also jams some extra results in. 2702 */ 2703 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2704 2705 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2706 mySearchRuntimeDetails = theSearchRuntimeDetails; 2707 mySort = myParams.getSort(); 2708 myOffset = myParams.getOffset(); 2709 myRequest = theRequest; 2710 myCompositeBroadcaster = 2711 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2712 2713 // everything requires fetching recursively all related resources 2714 if (myParams.getEverythingMode() != null) { 2715 myFetchIncludesForEverythingOperation = true; 2716 } 2717 2718 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2719 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2720 } 2721 2722 private void fetchNext() { 2723 try { 2724 if (myHaveRawSqlHooks) { 2725 CurrentThreadCaptureQueriesListener.startCapturing(); 2726 } 2727 2728 // If we don't have a query yet, create one 2729 if (myResultsIterator == null) { 2730 if (!mySearchProperties.hasMaxResultsRequested()) { 2731 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2732 } 2733 2734 /* 2735 * assigns the results iterator 2736 * and populates the myQueryList. 2737 */ 2738 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2739 } 2740 2741 if (myNext == null) { 2742 // no next means we need a new query (if one is available) 2743 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2744 /* 2745 * Because we combine our DB searches with Lucene 2746 * sometimes we can have multiple results iterators 2747 * (with only some having data in them to extract). 2748 * 2749 * We'll iterate our results iterators until we 2750 * either run out of results iterators, or we 2751 * have one that actually has data in it. 2752 */ 2753 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2754 retrieveNextIteratorQuery(); 2755 } 2756 2757 if (!myResultsIterator.hasNext()) { 2758 // we couldn't find a results iterator; 2759 // we're done here 2760 break; 2761 } 2762 2763 JpaPid nextPid = myResultsIterator.next(); 2764 if (myHavePerfTraceFoundIdHook) { 2765 callPerformanceTracingHook(nextPid); 2766 } 2767 2768 if (nextPid != null) { 2769 if (!myPidSet.contains(nextPid)) { 2770 if (!mySearchProperties.isDeduplicateInDatabase()) { 2771 /* 2772 * We only add to the map if we aren't fetching "everything"; 2773 * otherwise, we let the de-duplication happen in the database 2774 * (see createChunkedQueryNormalSearch above), because it 2775 * saves memory that way. 
2776 */ 2777 myPidSet.add(nextPid); 2778 } 2779 if (doNotSkipNextPidForEverything()) { 2780 myNext = nextPid; 2781 myNonSkipCount++; 2782 break; 2783 } 2784 } else { 2785 mySkipCount++; 2786 } 2787 } 2788 2789 if (!myResultsIterator.hasNext()) { 2790 if (mySearchProperties.hasMaxResultsRequested() 2791 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2792 if (mySkipCount > 0 && myNonSkipCount == 0) { 2793 sendProcessingMsgAndFirePerformanceHook(); 2794 // need the next iterator; increase the maxsize 2795 // (we should always do this) 2796 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2797 mySearchProperties.setMaxResultsRequested(maxResults); 2798 2799 if (!mySearchProperties.isDeduplicateInDatabase()) { 2800 // if we're not using the database to deduplicate 2801 // we should recheck our memory usage 2802 // the prefetch size check is future proofing 2803 int prefetchSize = myStorageSettings 2804 .getSearchPreFetchThresholds() 2805 .size(); 2806 if (prefetchSize > 0) { 2807 if (myStorageSettings 2808 .getSearchPreFetchThresholds() 2809 .get(prefetchSize - 1) 2810 < mySearchProperties.getMaxResultsRequested()) { 2811 mySearchProperties.setDeduplicateInDatabase(true); 2812 } 2813 } 2814 } 2815 2816 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2817 } 2818 } 2819 } 2820 } 2821 } 2822 2823 if (myNext == null) { 2824 // if we got here, it means the current JpaPid has already been processed, 2825 // and we will decide (here) if we need to fetch related resources recursively 2826 if (myFetchIncludesForEverythingOperation) { 2827 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2828 myFetchIncludesForEverythingOperation = false; 2829 } 2830 if (myIncludesIterator != null) { 2831 while (myIncludesIterator.hasNext()) { 2832 JpaPid next = myIncludesIterator.next(); 2833 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2834 myNext = next; 2835 break; 2836 } 2837 } 2838 if (myNext == null) { 2839 myNext = NO_MORE; 2840 } 2841 } else { 2842 myNext = NO_MORE; 2843 } 2844 } 2845 2846 if (!mySearchProperties.hasMaxResultsRequested()) { 2847 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2848 } else { 2849 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2850 } 2851 2852 } finally { 2853 // search finished - fire hooks 2854 if (myHaveRawSqlHooks) { 2855 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2856 } 2857 } 2858 2859 if (myFirst) { 2860 HookParams params = new HookParams() 2861 .add(RequestDetails.class, myRequest) 2862 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2863 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2864 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2865 myFirst = false; 2866 } 2867 2868 if (NO_MORE.equals(myNext)) { 2869 HookParams params = new HookParams() 2870 .add(RequestDetails.class, myRequest) 2871 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2872 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2873 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2874 } 2875 } 2876 2877 private Integer calculateMaxResultsToFetch() { 2878 if (myParams.getLoadSynchronousUpTo() != null) { 2879 return myParams.getLoadSynchronousUpTo(); 2880 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2881 return myParams.getEverythingMode() != null 2882 ? 
myParams.getOffset() + myParams.getCount() 2883 : myParams.getCount(); 2884 } else { 2885 return myStorageSettings.getFetchSizeDefaultMaximum(); 2886 } 2887 } 2888 2889 private boolean doNotSkipNextPidForEverything() { 2890 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2891 } 2892 2893 private void callPerformanceTracingHook(JpaPid theNextPid) { 2894 HookParams params = new HookParams() 2895 .add(Integer.class, System.identityHashCode(this)) 2896 .add(Object.class, theNextPid); 2897 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2898 } 2899 2900 private void sendProcessingMsgAndFirePerformanceHook() { 2901 String msg = "Pass completed with no matching results seeking rows " 2902 + myPidSet.size() + "-" + mySkipCount 2903 + ". This indicates an inefficient query! Retrying with new max count of " 2904 + mySearchProperties.getMaxResultsRequested(); 2905 firePerformanceWarning(myRequest, msg); 2906 } 2907 2908 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2909 Integer offset = theOffset; 2910 if (myQueryList.isEmpty()) { 2911 // Capture times for Lucene/Elasticsearch queries as well 2912 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2913 2914 // setting offset to 0 to fetch all resource ids to guarantee 2915 // correct output result for everything operation during paging 2916 if (myParams.getEverythingMode() != null) { 2917 offset = 0; 2918 } 2919 2920 SearchQueryProperties properties = mySearchProperties.clone(); 2921 properties 2922 .setOffset(offset) 2923 .setMaxResultsRequested(theMaxResultsToFetch) 2924 .setDoCountOnlyFlag(false) 2925 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2926 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2927 } 2928 2929 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2930 2931 retrieveNextIteratorQuery(); 2932 2933 mySkipCount = 0; 2934 myNonSkipCount = 0; 2935 } 2936 2937 private void retrieveNextIteratorQuery() { 2938 close(); 2939 if (isNotEmpty(myQueryList)) { 2940 myResultsIterator = myQueryList.remove(0); 2941 myHasNextIteratorQuery = true; 2942 } else { 2943 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2944 myHasNextIteratorQuery = false; 2945 } 2946 } 2947 2948 @Override 2949 public boolean hasNext() { 2950 if (myNext == null) { 2951 fetchNext(); 2952 } 2953 return !NO_MORE.equals(myNext); 2954 } 2955 2956 @Override 2957 public JpaPid next() { 2958 fetchNext(); 2959 JpaPid retVal = myNext; 2960 myNext = null; 2961 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2962 return retVal; 2963 } 2964 2965 @Override 2966 public int getSkippedCount() { 2967 return mySkipCount; 2968 } 2969 2970 @Override 2971 public int getNonSkippedCount() { 2972 return myNonSkipCount; 2973 } 2974 2975 @Override 2976 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2977 Collection<JpaPid> batch = new ArrayList<>(); 2978 while (this.hasNext() && batch.size() < theBatchSize) { 2979 batch.add(this.next()); 2980 } 2981 return batch; 2982 } 2983 2984 @Override 2985 public void close() { 2986 if (myResultsIterator != null) { 2987 myResultsIterator.close(); 2988 } 2989 myResultsIterator = null; 2990 } 2991 } 2992 2993 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2994 // Only log at debug level since these messages aren't considered important enough 2995 // that we should be 
cluttering the system log, but they are important to the
		// specific query being executed, so we'll surface them for that query via the
		// JPA_PERFTRACE_INFO pointcut instead
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}

	private static ScrollableResults<?> toScrollableResults(Query theQuery) {
		org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
		return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
	}
}