
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.ComboSearchParamType; 023import ca.uhn.fhir.context.FhirContext; 024import ca.uhn.fhir.context.FhirVersionEnum; 025import ca.uhn.fhir.context.RuntimeResourceDefinition; 026import ca.uhn.fhir.context.RuntimeSearchParam; 027import ca.uhn.fhir.i18n.Msg; 028import ca.uhn.fhir.interceptor.api.HookParams; 029import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 030import ca.uhn.fhir.interceptor.api.Pointcut; 031import ca.uhn.fhir.interceptor.model.RequestPartitionId; 032import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 033import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 034import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 035import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 036import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 037import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 038import ca.uhn.fhir.jpa.dao.BaseStorageDao; 039import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 040import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 041import ca.uhn.fhir.jpa.dao.IResultIterator; 042import ca.uhn.fhir.jpa.dao.ISearchBuilder; 043import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 044import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 045import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 046import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 047import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 048import ca.uhn.fhir.jpa.model.config.PartitionSettings; 049import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 050import ca.uhn.fhir.jpa.model.dao.JpaPid; 051import ca.uhn.fhir.jpa.model.dao.JpaPidFk; 052import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 053import ca.uhn.fhir.jpa.model.entity.BaseTag; 054import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 055import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 056import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 057import ca.uhn.fhir.jpa.model.entity.ResourceLink; 058import ca.uhn.fhir.jpa.model.entity.ResourceTag; 059import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 060import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 061import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 062import ca.uhn.fhir.jpa.model.util.JpaConstants; 063import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 064import ca.uhn.fhir.jpa.search.SearchConstants; 065import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 066import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 067import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 069import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 070import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 
071import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 072import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 073import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 074import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 075import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 076import ca.uhn.fhir.jpa.util.BaseIterator; 077import ca.uhn.fhir.jpa.util.CartesianProductUtil; 078import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 079import ca.uhn.fhir.jpa.util.QueryChunker; 080import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; 081import ca.uhn.fhir.jpa.util.SqlQueryList; 082import ca.uhn.fhir.model.api.IQueryParameterType; 083import ca.uhn.fhir.model.api.Include; 084import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 085import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 086import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 087import ca.uhn.fhir.rest.api.Constants; 088import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 089import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 090import ca.uhn.fhir.rest.api.SortOrderEnum; 091import ca.uhn.fhir.rest.api.SortSpec; 092import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 093import ca.uhn.fhir.rest.api.server.RequestDetails; 094import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 095import ca.uhn.fhir.rest.param.DateParam; 096import ca.uhn.fhir.rest.param.DateRangeParam; 097import ca.uhn.fhir.rest.param.ParamPrefixEnum; 098import ca.uhn.fhir.rest.param.ParameterUtil; 099import ca.uhn.fhir.rest.param.ReferenceParam; 100import ca.uhn.fhir.rest.param.StringParam; 101import ca.uhn.fhir.rest.param.TokenParam; 102import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 103import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 104import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 105import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 106import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 107import ca.uhn.fhir.system.HapiSystemProperties; 108import ca.uhn.fhir.util.SearchParameterUtil; 109import ca.uhn.fhir.util.StopWatch; 110import ca.uhn.fhir.util.StringUtil; 111import ca.uhn.fhir.util.UrlUtil; 112import com.google.common.annotations.VisibleForTesting; 113import com.google.common.collect.ListMultimap; 114import com.google.common.collect.Lists; 115import com.google.common.collect.MultimapBuilder; 116import com.healthmarketscience.sqlbuilder.Condition; 117import jakarta.annotation.Nonnull; 118import jakarta.annotation.Nullable; 119import jakarta.persistence.EntityManager; 120import jakarta.persistence.PersistenceContext; 121import jakarta.persistence.PersistenceContextType; 122import jakarta.persistence.Query; 123import jakarta.persistence.Tuple; 124import jakarta.persistence.TypedQuery; 125import jakarta.persistence.criteria.CriteriaBuilder; 126import jakarta.persistence.criteria.CriteriaQuery; 127import jakarta.persistence.criteria.Predicate; 128import jakarta.persistence.criteria.Root; 129import jakarta.persistence.criteria.Selection; 130import org.apache.commons.collections4.ListUtils; 131import org.apache.commons.lang3.StringUtils; 132import org.apache.commons.lang3.Validate; 133import org.apache.commons.lang3.math.NumberUtils; 134import org.apache.commons.lang3.tuple.Pair; 135import org.hibernate.ScrollMode; 136import org.hibernate.ScrollableResults; 137import org.hl7.fhir.instance.model.api.IAnyResource; 138import org.hl7.fhir.instance.model.api.IBaseResource; 139import org.hl7.fhir.instance.model.api.IIdType; 140import 
org.slf4j.Logger; 141import org.slf4j.LoggerFactory; 142import org.springframework.beans.factory.annotation.Autowired; 143import org.springframework.jdbc.core.JdbcTemplate; 144import org.springframework.transaction.support.TransactionSynchronizationManager; 145 146import java.util.ArrayList; 147import java.util.Arrays; 148import java.util.Collection; 149import java.util.Collections; 150import java.util.Comparator; 151import java.util.HashMap; 152import java.util.HashSet; 153import java.util.Iterator; 154import java.util.LinkedList; 155import java.util.List; 156import java.util.Map; 157import java.util.Objects; 158import java.util.Optional; 159import java.util.Set; 160import java.util.stream.Collectors; 161 162import static ca.uhn.fhir.jpa.model.util.JpaConstants.NO_MORE; 163import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 164import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 165import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 166import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 167import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 168import static java.util.Objects.requireNonNull; 169import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 170import static org.apache.commons.lang3.StringUtils.isBlank; 171import static org.apache.commons.lang3.StringUtils.isNotBlank; 172import static org.apache.commons.lang3.StringUtils.stripStart; 173 174/** 175 * The SearchBuilder is responsible for actually forming the SQL query that handles 176 * searches for resources 177 */ 178public class SearchBuilder implements ISearchBuilder<JpaPid> { 179 180 /** 181 * See loadResourcesByPid 182 * for an explanation of why we use the constant 800 183 */ 184 // NB: keep public 185 @Deprecated 186 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 187 188 public static final String RESOURCE_ID_ALIAS = "resource_id"; 189 public static final String PARTITION_ID_ALIAS = "partition_id"; 190 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 191 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 192 193 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 194 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 195 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 196 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 197 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 198 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 199 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 200 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 201 public static Integer myMaxPageSizeForTests = null; 202 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 203 protected final IResourceTagDao myResourceTagDao; 204 private String myResourceName; 205 private final Class<? 
extends IBaseResource> myResourceType; 206 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 207 private final SqlObjectFactory mySqlBuilderFactory; 208 private final HibernatePropertiesProvider myDialectProvider; 209 private final ISearchParamRegistry mySearchParamRegistry; 210 private final PartitionSettings myPartitionSettings; 211 private final DaoRegistry myDaoRegistry; 212 private final FhirContext myContext; 213 private final IIdHelperService<JpaPid> myIdHelperService; 214 private final JpaStorageSettings myStorageSettings; 215 private final SearchQueryProperties mySearchProperties; 216 private final IResourceHistoryTableDao myResourceHistoryTableDao; 217 private final IJpaStorageResourceParser myJpaStorageResourceParser; 218 219 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 220 protected EntityManager myEntityManager; 221 222 private CriteriaBuilder myCriteriaBuilder; 223 private SearchParameterMap myParams; 224 private String mySearchUuid; 225 private int myFetchSize; 226 227 private boolean myRequiresTotal; 228 229 /** 230 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 231 */ 232 private Set<JpaPid> myPidSet; 233 234 private boolean myHasNextIteratorQuery = false; 235 private RequestPartitionId myRequestPartitionId; 236 237 private IFulltextSearchSvc myFulltextSearchSvc; 238 239 @Autowired(required = false) 240 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 241 myFulltextSearchSvc = theFulltextSearchSvc; 242 } 243 244 @Autowired(required = false) 245 private IElasticsearchSvc myIElasticsearchSvc; 246 247 @Autowired 248 private IResourceHistoryTagDao myResourceHistoryTagDao; 249 250 @Autowired 251 private IRequestPartitionHelperSvc myPartitionHelperSvc; 252 253 /** 254 * Constructor 255 */ 256 @SuppressWarnings({"rawtypes", "unchecked"}) 257 public SearchBuilder( 258 String theResourceName, 259 JpaStorageSettings theStorageSettings, 260 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 261 SqlObjectFactory theSqlBuilderFactory, 262 HibernatePropertiesProvider theDialectProvider, 263 ISearchParamRegistry theSearchParamRegistry, 264 PartitionSettings thePartitionSettings, 265 IInterceptorBroadcaster theInterceptorBroadcaster, 266 IResourceTagDao theResourceTagDao, 267 DaoRegistry theDaoRegistry, 268 FhirContext theContext, 269 IIdHelperService theIdHelperService, 270 IResourceHistoryTableDao theResourceHistoryTagDao, 271 IJpaStorageResourceParser theIJpaStorageResourceParser, 272 Class<? 
extends IBaseResource> theResourceType) { 273 myResourceName = theResourceName; 274 myResourceType = theResourceType; 275 myStorageSettings = theStorageSettings; 276 277 myEntityManagerFactory = theEntityManagerFactory; 278 mySqlBuilderFactory = theSqlBuilderFactory; 279 myDialectProvider = theDialectProvider; 280 mySearchParamRegistry = theSearchParamRegistry; 281 myPartitionSettings = thePartitionSettings; 282 myInterceptorBroadcaster = theInterceptorBroadcaster; 283 myResourceTagDao = theResourceTagDao; 284 myDaoRegistry = theDaoRegistry; 285 myContext = theContext; 286 myIdHelperService = theIdHelperService; 287 myResourceHistoryTableDao = theResourceHistoryTagDao; 288 myJpaStorageResourceParser = theIJpaStorageResourceParser; 289 290 mySearchProperties = new SearchQueryProperties(); 291 } 292 293 @VisibleForTesting 294 void setResourceName(String theName) { 295 myResourceName = theName; 296 } 297 298 @Override 299 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 300 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 301 } 302 303 @Override 304 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 305 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 306 } 307 308 @Override 309 public void setRequireTotal(boolean theRequireTotal) { 310 myRequiresTotal = theRequireTotal; 311 } 312 313 @Override 314 public boolean requiresTotal() { 315 return myRequiresTotal; 316 } 317 318 private void searchForIdsWithAndOr( 319 SearchQueryBuilder theSearchSqlBuilder, 320 QueryStack theQueryStack, 321 @Nonnull SearchParameterMap theParams, 322 RequestDetails theRequest) { 323 myParams = theParams; 324 mySearchProperties.setSortSpec(myParams.getSort()); 325 326 // Remove any empty parameters 327 theParams.clean(); 328 329 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 330 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 331 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 332 } 333 334 // Attempt to lookup via composite unique key. 
335 if (isComboSearchCandidate()) { 336 attemptComboSearchParameterProcessing(theQueryStack, theParams, theRequest); 337 } 338 339 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 340 List<String> paramNames = myParams.keySet().stream() 341 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 342 .filter(t -> !t.equals(Constants.PARAM_TAG)) 343 .collect(Collectors.toList()); 344 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 345 paramNames.add(IAnyResource.SP_RES_ID); 346 } 347 if (myParams.containsKey(Constants.PARAM_TAG)) { 348 paramNames.add(Constants.PARAM_TAG); 349 } 350 351 // Handle each parameter 352 for (String nextParamName : paramNames) { 353 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 354 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 355 // Elasticsearch 356 continue; 357 } 358 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 359 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 360 .setParamName(nextParamName) 361 .setAndOrParams(andOrParams) 362 .setRequest(theRequest) 363 .setRequestPartitionId(myRequestPartitionId) 364 .setIncludeDeleted(myParams.getSearchIncludeDeletedMode())); 365 if (predicate != null) { 366 theSearchSqlBuilder.addPredicate(predicate); 367 } 368 } 369 } 370 371 /** 372 * This method returns <code>true</code> if the search is potentially a candidate for 373 * processing using a Combo SearchParameter. This means that: 374 * <ul> 375 * <li>Combo SearchParamdeters are enabled</li> 376 * <li>It's not an $everything search</li> 377 * <li>We're searching on a specific resource type</li> 378 * </ul> 379 */ 380 private boolean isComboSearchCandidate() { 381 return myStorageSettings.isUniqueIndexesEnabled() 382 && myParams.getEverythingMode() == null 383 && myResourceName != null; 384 } 385 386 @SuppressWarnings("ConstantConditions") 387 @Override 388 public Long createCountQuery( 389 SearchParameterMap theParams, 390 String theSearchUuid, 391 RequestDetails theRequest, 392 @Nonnull RequestPartitionId theRequestPartitionId) { 393 394 assert theRequestPartitionId != null; 395 assert TransactionSynchronizationManager.isActualTransactionActive(); 396 397 init(theParams, theSearchUuid, theRequestPartitionId); 398 399 if (checkUseHibernateSearch()) { 400 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 401 } 402 403 SearchQueryProperties properties = mySearchProperties.clone(); 404 properties.setDoCountOnlyFlag(true); 405 properties.setSortSpec(null); // counts don't require sorts 406 properties.setMaxResultsRequested(null); 407 properties.setOffset(null); 408 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 409 if (queries.isEmpty()) { 410 return 0L; 411 } else { 412 JpaPid jpaPid = queries.get(0).next(); 413 return jpaPid.getId(); 414 } 415 } 416 417 /** 418 * @param thePidSet May be null 419 */ 420 @Override 421 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 422 myPidSet = new HashSet<>(thePidSet); 423 } 424 425 @SuppressWarnings("ConstantConditions") 426 @Override 427 public IResultIterator<JpaPid> createQuery( 428 SearchParameterMap theParams, 429 SearchRuntimeDetails theSearchRuntimeDetails, 430 RequestDetails theRequest, 431 @Nonnull RequestPartitionId theRequestPartitionId) { 432 assert theRequestPartitionId != null; 433 assert 
TransactionSynchronizationManager.isActualTransactionActive(); 434 435 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 436 437 if (myPidSet == null) { 438 myPidSet = new HashSet<>(); 439 } 440 441 return new QueryIterator(theSearchRuntimeDetails, theRequest); 442 } 443 444 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 445 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 446 // we mutate the params. Make a private copy. 447 myParams = theParams.clone(); 448 mySearchProperties.setSortSpec(myParams.getSort()); 449 mySearchUuid = theSearchUuid; 450 myRequestPartitionId = theRequestPartitionId; 451 } 452 453 /** 454 * The query created can be either a count query or the 455 * actual query. 456 * This is why it takes a SearchQueryProperties object 457 * (and doesn't use the local version of it). 458 * The properties may differ slightly for whichever 459 * query this is. 460 */ 461 private List<ISearchQueryExecutor> createQuery( 462 SearchParameterMap theParams, 463 SearchQueryProperties theSearchProperties, 464 RequestDetails theRequest, 465 SearchRuntimeDetails theSearchRuntimeDetails) { 466 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 467 468 if (checkUseHibernateSearch()) { 469 // we're going to run at least part of the search against the Fulltext service. 470 471 // Ugh - we have two different return types for now 472 ISearchQueryExecutor fulltextExecutor = null; 473 List<JpaPid> fulltextMatchIds = null; 474 int resultCount = 0; 475 if (myParams.isLastN()) { 476 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 477 resultCount = fulltextMatchIds.size(); 478 } else if (myParams.getEverythingMode() != null) { 479 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 480 resultCount = fulltextMatchIds.size(); 481 } else { 482 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 483 // enabled SP indexing). 484 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 485 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 486 // extra work in Elastic. 487 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 488 489 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 490 // a page. 491 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 492 } 493 494 if (fulltextExecutor == null) { 495 fulltextExecutor = 496 SearchQueryExecutors.from(fulltextMatchIds != null ? 
fulltextMatchIds : new ArrayList<>()); 497 } 498 499 if (theSearchRuntimeDetails != null) { 500 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 501 IInterceptorBroadcaster compositeBroadcaster = 502 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 503 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 504 HookParams params = new HookParams() 505 .add(RequestDetails.class, theRequest) 506 .addIfMatchesType(ServletRequestDetails.class, theRequest) 507 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 508 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 509 } 510 } 511 512 // can we skip the database entirely and return the pid list from here? 513 boolean canSkipDatabase = 514 // if we processed an AND clause, and it returned nothing, then nothing can match. 515 !fulltextExecutor.hasNext() 516 || 517 // Our hibernate search query doesn't respect partitions yet 518 (!myPartitionSettings.isPartitioningEnabled() 519 && 520 // were there AND terms left? Then we still need the db. 521 theParams.isEmpty() 522 && 523 // not every param is a param. :-( 524 theParams.getNearDistanceParam() == null 525 && 526 // todo MB don't we support _lastUpdated and _offset now? 527 theParams.getLastUpdated() == null 528 && theParams.getEverythingMode() == null 529 && theParams.getOffset() == null); 530 531 if (canSkipDatabase) { 532 ourLog.trace("Query finished after HSearch. Skip db query phase"); 533 if (theSearchProperties.hasMaxResultsRequested()) { 534 fulltextExecutor = SearchQueryExecutors.limited( 535 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 536 } 537 queries.add(fulltextExecutor); 538 } else { 539 ourLog.trace("Query needs db after HSearch. Chunking."); 540 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 541 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 542 QueryChunker.chunk( 543 fulltextExecutor, 544 SearchBuilder.getMaximumPageSize(), 545 // for each list of (SearchBuilder.getMaximumPageSize()) 546 // we create a chunked query and add it to 'queries' 547 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 548 } 549 } else { 550 // do everything in the database. 551 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 552 } 553 554 return queries; 555 } 556 557 /** 558 * Check to see if query should use Hibernate Search, and error if the query can't continue. 559 * 560 * @return true if the query should first be processed by Hibernate Search 561 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 562 */ 563 private boolean checkUseHibernateSearch() { 564 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 565 566 if (!fulltextEnabled) { 567 failIfUsed(Constants.PARAM_TEXT); 568 failIfUsed(Constants.PARAM_CONTENT); 569 } else { 570 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 571 final String paramName = sortSpec.getParamName(); 572 if (paramName.contains(".")) { 573 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 574 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 575 } 576 } 577 } 578 579 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 580 // can. 
581 return fulltextEnabled 582 && myParams != null 583 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 584 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 585 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 586 } 587 588 private void failIfUsed(String theParamName) { 589 if (myParams.containsKey(theParamName)) { 590 throw new InvalidRequestException(Msg.code(1192) 591 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 592 } 593 } 594 595 private void failIfUsedWithChainedSort(String theParamName) { 596 if (myParams.containsKey(theParamName)) { 597 throw new InvalidRequestException(Msg.code(2524) 598 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 599 + theParamName); 600 } 601 } 602 603 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 604 // Can we use our hibernate search generated index on resource to support lastN?: 605 if (myStorageSettings.isHibernateSearchIndexSearchParams()) { 606 if (myFulltextSearchSvc == null) { 607 throw new InvalidRequestException(Msg.code(2027) 608 + "LastN operation is not enabled on this service, can not process this request"); 609 } 610 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 611 .map(t -> (JpaPid) t) 612 .collect(Collectors.toList()); 613 } else { 614 throw new InvalidRequestException( 615 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 616 } 617 } 618 619 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 620 JpaPid pid = null; 621 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 622 String idParamValue; 623 IQueryParameterType idParam = 624 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 625 if (idParam instanceof TokenParam idParm) { 626 idParamValue = idParm.getValue(); 627 } else { 628 StringParam idParm = (StringParam) idParam; 629 idParamValue = idParm.getValue(); 630 } 631 632 pid = myIdHelperService 633 .resolveResourceIdentity( 634 myRequestPartitionId, 635 myResourceName, 636 idParamValue, 637 ResolveIdentityMode.includeDeleted().cacheOk()) 638 .getPersistentId(); 639 } 640 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 641 } 642 643 private void doCreateChunkedQueries( 644 SearchParameterMap theParams, 645 List<JpaPid> thePids, 646 SearchQueryProperties theSearchQueryProperties, 647 RequestDetails theRequest, 648 ArrayList<ISearchQueryExecutor> theQueries) { 649 650 if (thePids.size() < getMaximumPageSize()) { 651 thePids = normalizeIdListForInClause(thePids); 652 } 653 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 654 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 655 } 656 657 /** 658 * Combs through the params for any _id parameters and extracts the PIDs for them 659 */ 660 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 661 // get all the IQueryParameterType objects 662 // for _id -> these should all be StringParam values 663 HashSet<IIdType> ids = new HashSet<>(); 664 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 665 for (List<IQueryParameterType> paramList : params) { 666 for (IQueryParameterType param : paramList) { 667 String id; 668 if (param instanceof StringParam) { 669 // we expect all _id values to be StringParams 670 id = 
((StringParam) param).getValue(); 671 } else if (param instanceof TokenParam) { 672 id = ((TokenParam) param).getValue(); 673 } else { 674 // we do not expect the _id parameter to be a non-string value 675 throw new IllegalArgumentException( 676 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 677 } 678 679 IIdType idType = myContext.getVersion().newIdType(); 680 if (id.contains("/")) { 681 idType.setValue(id); 682 } else { 683 idType.setValue(myResourceName + "/" + id); 684 } 685 ids.add(idType); 686 } 687 } 688 689 // fetch our target Pids 690 // this will throw if an id is not found 691 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 692 myRequestPartitionId, 693 new ArrayList<>(ids), 694 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 695 696 // add the pids to targetPids 697 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 698 theTargetPids.add(pid.getPersistentId()); 699 } 700 } 701 702 private void createChunkedQuery( 703 SearchParameterMap theParams, 704 SearchQueryProperties theSearchProperties, 705 RequestDetails theRequest, 706 List<JpaPid> thePidList, 707 List<ISearchQueryExecutor> theSearchQueryExecutors) { 708 if (myParams.getEverythingMode() != null) { 709 createChunkedQueryForEverythingSearch( 710 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 711 } else { 712 createChunkedQueryNormalSearch( 713 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 714 } 715 } 716 717 private void createChunkedQueryNormalSearch( 718 SearchParameterMap theParams, 719 SearchQueryProperties theSearchProperties, 720 RequestDetails theRequest, 721 List<JpaPid> thePidList, 722 List<ISearchQueryExecutor> theSearchQueryExecutors) { 723 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 724 myContext, 725 myStorageSettings, 726 myPartitionSettings, 727 myRequestPartitionId, 728 myResourceName, 729 mySqlBuilderFactory, 730 myDialectProvider, 731 theSearchProperties.isDoCountOnlyFlag(), 732 myResourceName == null || myResourceName.isBlank()); 733 QueryStack queryStack3 = new QueryStack( 734 theRequest, 735 theParams, 736 myStorageSettings, 737 myContext, 738 sqlBuilder, 739 mySearchParamRegistry, 740 myPartitionSettings); 741 742 if (theParams.keySet().size() > 1 743 || theParams.getSort() != null 744 || theParams.keySet().contains(Constants.PARAM_HAS) 745 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 746 List<RuntimeSearchParam> activeComboParams = List.of(); 747 if (myResourceName != null) { 748 activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 749 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 750 } 751 if (activeComboParams.isEmpty()) { 752 sqlBuilder.setNeedResourceTableRoot(true); 753 } 754 } 755 756 /* 757 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 758 * specific filters with ORs as their root from working around the natural resource type / deletion 759 * status / partition IDs built into queries. 
760 */ 761 if (theParams.containsKey(Constants.PARAM_FILTER)) { 762 Condition partitionIdPredicate = sqlBuilder 763 .getOrCreateResourceTablePredicateBuilder() 764 .createPartitionIdPredicate(myRequestPartitionId); 765 if (partitionIdPredicate != null) { 766 sqlBuilder.addPredicate(partitionIdPredicate); 767 } 768 } 769 770 // Normal search 771 // we will create a resourceTablePredicate if and only if we have an _id SP. 772 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 773 774 // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the 775 // partition ID predicate in that case. 776 if (!sqlBuilder.haveAtLeastOnePredicate()) { 777 Condition partitionIdPredicate; 778 779 if (theParams.getSearchIncludeDeletedMode() != null) { 780 partitionIdPredicate = sqlBuilder 781 .getOrCreateResourceTablePredicateBuilder(true, theParams.getSearchIncludeDeletedMode()) 782 .createPartitionIdPredicate(myRequestPartitionId); 783 } else { 784 partitionIdPredicate = sqlBuilder 785 .getOrCreateResourceTablePredicateBuilder() 786 .createPartitionIdPredicate(myRequestPartitionId); 787 } 788 789 if (partitionIdPredicate != null) { 790 sqlBuilder.addPredicate(partitionIdPredicate); 791 } 792 } 793 794 // Add PID list predicate for full text search and/or lastn operation 795 addPidListPredicate(thePidList, sqlBuilder); 796 797 // Last updated 798 addLastUpdatePredicate(sqlBuilder); 799 800 /* 801 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 802 * to something needed to guarantee correct results. 803 * 804 * Why do we need it? Suppose for example, a query like: 805 * Observation?category=foo,bar,baz 806 * And suppose you have many resources that have all 3 of these category codes. In this case 807 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 808 * we may exhaust the query results without getting enough distinct results back. When that 809 * happens we re-run the query with a larger limit. Excluding results we already know about 810 * tries to ensure that we get new unique results. 811 * 812 * The challenge with that though is that lots of DBs have an issue with too many 813 * parameters in one query. So we only do this optimization if there aren't too 814 * many results. 
815 */ 816 if (myHasNextIteratorQuery) { 817 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 818 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 819 } 820 } 821 822 /* 823 * If offset is present, we want to deduplicate the results by using GROUP BY; 824 * OR 825 * if the MaxResultsToFetch is null, we are requesting "everything", 826 * so we'll let the db do the deduplication (instead of in-memory) 827 */ 828 if (theSearchProperties.isDeduplicateInDatabase()) { 829 queryStack3.addGrouping(); 830 queryStack3.setUseAggregate(true); 831 } 832 833 /* 834 * Sort 835 * 836 * If we have a sort, we wrap the criteria search (the search that actually 837 * finds the appropriate resources) in an outer search which is then sorted 838 */ 839 if (theSearchProperties.hasSort()) { 840 assert !theSearchProperties.isDoCountOnlyFlag(); 841 842 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 843 } 844 845 /* 846 * Now perform the search 847 */ 848 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 849 } 850 851 private void executeSearch( 852 SearchQueryProperties theProperties, 853 List<ISearchQueryExecutor> theSearchQueryExecutors, 854 SearchQueryBuilder sqlBuilder) { 855 GeneratedSql generatedSql = 856 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 857 if (!generatedSql.isMatchNothing()) { 858 SearchQueryExecutor executor = 859 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 860 theSearchQueryExecutors.add(executor); 861 } 862 } 863 864 private void createChunkedQueryForEverythingSearch( 865 RequestDetails theRequest, 866 SearchParameterMap theParams, 867 SearchQueryProperties theSearchQueryProperties, 868 List<JpaPid> thePidList, 869 List<ISearchQueryExecutor> theSearchQueryExecutors) { 870 871 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 872 myContext, 873 myStorageSettings, 874 myPartitionSettings, 875 myRequestPartitionId, 876 null, 877 mySqlBuilderFactory, 878 myDialectProvider, 879 theSearchQueryProperties.isDoCountOnlyFlag(), 880 false); 881 882 QueryStack queryStack3 = new QueryStack( 883 theRequest, 884 theParams, 885 myStorageSettings, 886 myContext, 887 sqlBuilder, 888 mySearchParamRegistry, 889 myPartitionSettings); 890 891 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 892 893 Set<JpaPid> targetPids = new HashSet<>(); 894 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 895 896 extractTargetPidsFromIdParams(targetPids); 897 898 // add the target pids to our executors as the first 899 // results iterator to go through 900 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 901 } else { 902 // For Everything queries, we make the query root by the ResourceLink table, since this query 903 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 904 // the one problem with this approach is that it doesn't catch Patients that have absolutely 905 // nothing linked to them. So we do one additional query to make sure we catch those too. 
906 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 907 myContext, 908 myStorageSettings, 909 myPartitionSettings, 910 myRequestPartitionId, 911 myResourceName, 912 mySqlBuilderFactory, 913 myDialectProvider, 914 theSearchQueryProperties.isDoCountOnlyFlag(), 915 false); 916 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 917 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 918 String sql = allTargetsSql.getSql(); 919 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 920 921 List<JpaPid> output = 922 jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args); 923 924 // we add a search executor to fetch unlinked patients first 925 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 926 } 927 928 List<String> typeSourceResources = new ArrayList<>(); 929 if (myParams.get(Constants.PARAM_TYPE) != null) { 930 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 931 } 932 933 queryStack3.addPredicateEverythingOperation( 934 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 935 936 // Add PID list predicate for full text search and/or lastn operation 937 addPidListPredicate(thePidList, sqlBuilder); 938 939 /* 940 * If offset is present, we want deduplicate the results by using GROUP BY 941 * ORDER BY is required to make sure we return unique results for each page 942 */ 943 if (theSearchQueryProperties.hasOffset()) { 944 queryStack3.addGrouping(); 945 queryStack3.addOrdering(); 946 queryStack3.setUseAggregate(true); 947 } 948 949 if (myParams.getEverythingMode().isPatient()) { 950 /* 951 * NB: patient-compartment limitation 952 * 953 * We are manually excluding Group and List resources 954 * from the patient-compartment for $everything operations on Patient type/instance. 
955 * 956 * See issue: https://github.com/hapifhir/hapi-fhir/issues/7118 957 */ 958 sqlBuilder.excludeResourceTypesPredicate( 959 SearchParameterUtil.RESOURCE_TYPES_TO_SP_TO_OMIT_FROM_PATIENT_COMPARTMENT.keySet()); 960 } 961 962 /* 963 * Now perform the search 964 */ 965 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 966 } 967 968 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 969 if (thePidList != null && !thePidList.isEmpty()) { 970 theSqlBuilder.addResourceIdsPredicate(thePidList); 971 } 972 } 973 974 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 975 DateRangeParam lu = myParams.getLastUpdated(); 976 if (lu != null && !lu.isEmpty()) { 977 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 978 theSqlBuilder.addPredicate(lastUpdatedPredicates); 979 } 980 } 981 982 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 983 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 984 jdbcTemplate.setFetchSize(myFetchSize); 985 if (theMaximumResults != null) { 986 jdbcTemplate.setMaxRows(theMaximumResults); 987 } 988 return jdbcTemplate; 989 } 990 991 private Collection<String> extractTypeSourceResourcesFromParams() { 992 993 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 994 995 // first off, let's flatten the list of list 996 List<IQueryParameterType> iQueryParameterTypesList = 997 listOfList.stream().flatMap(List::stream).toList(); 998 999 // then, extract all elements of each CSV into one big list 1000 List<String> resourceTypes = iQueryParameterTypesList.stream() 1001 .map(param -> ((StringParam) param).getValue()) 1002 .map(csvString -> List.of(csvString.split(","))) 1003 .flatMap(List::stream) 1004 .toList(); 1005 1006 Set<String> knownResourceTypes = myContext.getResourceTypes(); 1007 1008 // remove leading/trailing whitespaces if any and remove duplicates 1009 Set<String> retVal = new HashSet<>(); 1010 1011 for (String type : resourceTypes) { 1012 String trimmed = type.trim(); 1013 if (!knownResourceTypes.contains(trimmed)) { 1014 throw new ResourceNotFoundException( 1015 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 1016 } 1017 retVal.add(trimmed); 1018 } 1019 1020 return retVal; 1021 } 1022 1023 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 1024 return myStorageSettings.isIndexOnContainedResources() 1025 && theParams.values().stream() 1026 .flatMap(Collection::stream) 1027 .flatMap(Collection::stream) 1028 .anyMatch(ReferenceParam.class::isInstance); 1029 } 1030 1031 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 1032 if (theSort == null || isBlank(theSort.getParamName())) { 1033 return; 1034 } 1035 1036 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 1037 1038 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 1039 1040 theQueryStack.addSortOnResourceId(ascending); 1041 1042 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 1043 1044 theQueryStack.addSortOnResourcePID(ascending); 1045 1046 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1047 1048 theQueryStack.addSortOnLastUpdated(ascending); 1049 1050 } else { 1051 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1052 myResourceName, theSort.getParamName(), 
ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1053 1054 /* 1055 * If we have a sort like _sort=subject.name and we have an 1056 * uplifted refchain for that combination we can do it more efficiently 1057 * by using the index associated with the uplifted refchain. In this case, 1058 * we need to find the actual target search parameter (corresponding 1059 * to "name" in this example) so that we know what datatype it is. 1060 */ 1061 String paramName = theSort.getParamName(); 1062 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1063 String[] chains = StringUtils.split(paramName, '.'); 1064 if (chains.length == 2) { 1065 1066 // Given: Encounter?_sort=Patient:subject.name 1067 String referenceParam = chains[0]; // subject 1068 String referenceParamTargetType = null; // Patient 1069 String targetParam = chains[1]; // name 1070 1071 int colonIdx = referenceParam.indexOf(':'); 1072 if (colonIdx > -1) { 1073 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1074 referenceParam = referenceParam.substring(colonIdx + 1); 1075 } 1076 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1077 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1078 if (outerParam == null) { 1079 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1080 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1081 for (String nextTargetType : outerParam.getTargets()) { 1082 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1083 continue; 1084 } 1085 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1086 nextTargetType, 1087 targetParam, 1088 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1089 if (innerParam != null) { 1090 param = innerParam; 1091 break; 1092 } 1093 } 1094 } 1095 } 1096 } 1097 1098 int colonIdx = paramName.indexOf(':'); 1099 String referenceTargetType = null; 1100 if (colonIdx > -1) { 1101 referenceTargetType = paramName.substring(0, colonIdx); 1102 paramName = paramName.substring(colonIdx + 1); 1103 } 1104 1105 int dotIdx = paramName.indexOf('.'); 1106 String chainName = null; 1107 if (param == null && dotIdx > -1) { 1108 chainName = paramName.substring(dotIdx + 1); 1109 paramName = paramName.substring(0, dotIdx); 1110 if (chainName.contains(".")) { 1111 String msg = myContext 1112 .getLocalizer() 1113 .getMessageSanitized( 1114 BaseStorageDao.class, 1115 "invalidSortParameterTooManyChains", 1116 paramName + "." 
+ chainName); 1117 throw new InvalidRequestException(Msg.code(2286) + msg); 1118 } 1119 } 1120 1121 if (param == null) { 1122 param = mySearchParamRegistry.getActiveSearchParam( 1123 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1124 } 1125 1126 if (param == null) { 1127 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1128 } 1129 1130 // param will never be null here (the above line throws if it does) 1131 // this is just to prevent the warning 1132 assert param != null; 1133 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1134 throw new InvalidRequestException( 1135 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1136 } 1137 1138 switch (param.getParamType()) { 1139 case STRING: 1140 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1141 break; 1142 case DATE: 1143 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1144 break; 1145 case REFERENCE: 1146 theQueryStack.addSortOnResourceLink( 1147 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1148 break; 1149 case TOKEN: 1150 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1151 break; 1152 case NUMBER: 1153 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1154 break; 1155 case URI: 1156 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1157 break; 1158 case QUANTITY: 1159 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1160 break; 1161 case COMPOSITE: 1162 List<JpaParamUtil.ComponentAndCorrespondingParam> compositeList = 1163 JpaParamUtil.resolveCompositeComponents(mySearchParamRegistry, param); 1164 if (compositeList == null) { 1165 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1166 + " is not defined by the resource " + myResourceName); 1167 } 1168 if (compositeList.size() != 2) { 1169 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1170 + " must have 2 composite types declared in parameter annotation, found " 1171 + compositeList.size()); 1172 } 1173 RuntimeSearchParam left = compositeList.get(0).getComponentParameter(); 1174 RuntimeSearchParam right = compositeList.get(1).getComponentParameter(); 1175 1176 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1177 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1178 1179 break; 1180 case SPECIAL: 1181 if (LOCATION_POSITION.equals(param.getPath())) { 1182 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1183 break; 1184 } 1185 throw new InvalidRequestException( 1186 Msg.code(2306) + "This server does not support _sort specifications of type " 1187 + param.getParamType() + " - Can't serve _sort=" + paramName); 1188 1189 case HAS: 1190 default: 1191 throw new InvalidRequestException( 1192 Msg.code(1197) + "This server does not support _sort specifications of type " 1193 + param.getParamType() + " - Can't serve _sort=" + paramName); 1194 } 1195 } 1196 1197 // Recurse 1198 createSort(theQueryStack, theSort.getChain(), theParams); 1199 } 1200 1201 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1202 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1203 theResourceName, 
ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1204 String msg = myContext 1205 .getLocalizer() 1206 .getMessageSanitized( 1207 BaseStorageDao.class, 1208 "invalidSortParameter", 1209 theParamName, 1210 theResourceName, 1211 validSearchParameterNames); 1212 throw new InvalidRequestException(Msg.code(1194) + msg); 1213 } 1214 1215 private void createCompositeSort( 1216 QueryStack theQueryStack, 1217 RestSearchParameterTypeEnum theParamType, 1218 String theParamName, 1219 boolean theAscending) { 1220 1221 switch (theParamType) { 1222 case STRING: 1223 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1224 break; 1225 case DATE: 1226 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1227 break; 1228 case TOKEN: 1229 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1230 break; 1231 case QUANTITY: 1232 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1233 break; 1234 case NUMBER: 1235 case REFERENCE: 1236 case COMPOSITE: 1237 case URI: 1238 case HAS: 1239 case SPECIAL: 1240 default: 1241 throw new InvalidRequestException( 1242 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1243 + " on _sort=" + theParamName); 1244 } 1245 } 1246 1247 private void doLoadPids( 1248 RequestDetails theRequest, 1249 Collection<JpaPid> thePids, 1250 Collection<JpaPid> theIncludedPids, 1251 List<IBaseResource> theResourceListToPopulate, 1252 boolean theForHistoryOperation, 1253 Map<Long, Integer> thePosition) { 1254 1255 Map<JpaPid, Long> resourcePidToVersion = null; 1256 for (JpaPid next : thePids) { 1257 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1258 if (resourcePidToVersion == null) { 1259 resourcePidToVersion = new HashMap<>(); 1260 } 1261 resourcePidToVersion.put(next, next.getVersion()); 1262 } 1263 } 1264 1265 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1266 int expectedCount = versionlessPids.size(); 1267 if (versionlessPids.size() < getMaximumPageSize()) { 1268 /* 1269 * This method adds a bunch of extra params to the end of the parameter list 1270 * which are for a resource PID that will never exist (-1 / NO_MORE). We do this 1271 * so that the database can rely on a cached execution plan since we're not 1272 * generating a new SQL query for every possible number of resources. 1273 */ 1274 versionlessPids = normalizeIdListForInClause(versionlessPids); 1275 } 1276 1277 // Load the resource bodies 1278 List<JpaPidFk> historyVersionPks = JpaPidFk.fromPids(versionlessPids); 1279 List<ResourceHistoryTable> resourceSearchViewList = 1280 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(historyVersionPks); 1281 1282 /* 1283 * If we have specific versions to load, replace the history entries with the 1284 * correct ones 1285 * 1286 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1287 * version entity first, and by batching the fetches. 
But this is a fairly infrequently 1288 * used feature, and loading history entities by PK is a very efficient query so it's 1289 * not the end of the world 1290 */ 1291 if (resourcePidToVersion != null) { 1292 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1293 ResourceHistoryTable next = resourceSearchViewList.get(i); 1294 JpaPid resourceId = next.getPersistentId(); 1295 Long version = resourcePidToVersion.get(resourceId); 1296 resourceId.setVersion(version); 1297 if (version != null && !version.equals(next.getVersion())) { 1298 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1299 next.getResourceId().toFk(), version); 1300 resourceSearchViewList.set(i, replacement); 1301 } 1302 } 1303 } 1304 1305 /* 1306 * If we got fewer rows back than we expected, that means that one or more ResourceTable 1307 * entities (HFJ_RESOURCE) have a RES_VER version which doesn't exist in the 1308 * ResourceHistoryTable (HFJ_RES_VER) table. This should never happen under normal 1309 * operation, but if someone manually deletes a row or otherwise ends up in a weird 1310 * state it can happen. In that case, we do a manual process of figuring out what 1311 * is the right version. 1312 */ 1313 if (resourceSearchViewList.size() != expectedCount) { 1314 1315 Set<JpaPid> loadedPks = resourceSearchViewList.stream() 1316 .map(ResourceHistoryTable::getResourceId) 1317 .collect(Collectors.toSet()); 1318 for (JpaPid nextWantedPid : versionlessPids) { 1319 if (!nextWantedPid.equals(NO_MORE) && !loadedPks.contains(nextWantedPid)) { 1320 Optional<ResourceHistoryTable> latestVersion = findLatestVersion( 1321 theRequest, nextWantedPid, myResourceHistoryTableDao, myInterceptorBroadcaster); 1322 latestVersion.ifPresent(resourceSearchViewList::add); 1323 } 1324 } 1325 } 1326 1327 // -- preload all tags with tag definition if any 1328 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1329 1330 for (ResourceHistoryTable next : resourceSearchViewList) { 1331 if (next.getDeleted() != null) { 1332 continue; 1333 } 1334 1335 Class<? 
extends IBaseResource> resourceType = 1336 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1337 1338 JpaPid resourceId = next.getPersistentId(); 1339 1340 if (resourcePidToVersion != null) { 1341 Long version = resourcePidToVersion.get(resourceId); 1342 resourceId.setVersion(version); 1343 } 1344 1345 IBaseResource resource; 1346 resource = myJpaStorageResourceParser.toResource( 1347 theRequest, resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1348 if (resource == null) { 1349 ourLog.warn( 1350 "Unable to find resource {}/{}/_history/{} in database", 1351 next.getResourceType(), 1352 next.getIdDt().getIdPart(), 1353 next.getVersion()); 1354 continue; 1355 } 1356 1357 Integer index = thePosition.get(resourceId.getId()); 1358 if (index == null) { 1359 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1360 continue; 1361 } 1362 1363 if (theIncludedPids.contains(resourceId)) { 1364 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1365 } else { 1366 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1367 } 1368 1369 // ensure there's enough space; "<=" because of 0-indexing 1370 while (theResourceListToPopulate.size() <= index) { 1371 theResourceListToPopulate.add(null); 1372 } 1373 theResourceListToPopulate.set(index, resource); 1374 } 1375 } 1376 1377 @SuppressWarnings("OptionalIsPresent") 1378 @Nonnull 1379 public static Optional<ResourceHistoryTable> findLatestVersion( 1380 RequestDetails theRequest, 1381 JpaPid nextWantedPid, 1382 IResourceHistoryTableDao resourceHistoryTableDao, 1383 IInterceptorBroadcaster interceptorBroadcaster1) { 1384 assert nextWantedPid != null && !nextWantedPid.equals(NO_MORE); 1385 1386 Optional<ResourceHistoryTable> latestVersion = resourceHistoryTableDao 1387 .findVersionsForResource(JpaConstants.SINGLE_RESULT, nextWantedPid.toFk()) 1388 .findFirst(); 1389 String warning; 1390 if (latestVersion.isPresent()) { 1391 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1392 + " specifies an unknown current version, returning version " 1393 + latestVersion.get().getVersion() 1394 + " instead. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data."; 1395 } else { 1396 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1397 + " specifies an unknown current version, and no versions of this resource exist. 
This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data."; 1398 } 1399 1400 IInterceptorBroadcaster interceptorBroadcaster = 1401 CompositeInterceptorBroadcaster.newCompositeBroadcaster(interceptorBroadcaster1, theRequest); 1402 logAndBoradcastWarning(theRequest, warning, interceptorBroadcaster); 1403 return latestVersion; 1404 } 1405 1406 private static void logAndBoradcastWarning( 1407 RequestDetails theRequest, String warning, IInterceptorBroadcaster interceptorBroadcaster) { 1408 ourLog.warn(warning); 1409 1410 if (interceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) { 1411 HookParams params = new HookParams(); 1412 params.add(RequestDetails.class, theRequest); 1413 params.addIfMatchesType(ServletRequestDetails.class, theRequest); 1414 params.add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(warning)); 1415 interceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params); 1416 } 1417 } 1418 1419 private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) { 1420 return switch (myStorageSettings.getTagStorageMode()) { 1421 case VERSIONED -> getPidToTagMapVersioned(theHistoryTables); 1422 case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables); 1423 case INLINE -> Map.of(); 1424 }; 1425 } 1426 1427 @Nonnull 1428 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned( 1429 Collection<ResourceHistoryTable> theHistoryTables) { 1430 List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size()); 1431 1432 // -- find all resource has tags 1433 for (ResourceHistoryTable resource : theHistoryTables) { 1434 if (resource.isHasTags()) { 1435 idList.add(resource.getId()); 1436 } 1437 } 1438 1439 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1440 1441 // -- no tags 1442 if (idList.isEmpty()) { 1443 return tagMap; 1444 } 1445 1446 // -- get all tags for the idList 1447 Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList); 1448 1449 // -- build the map, key = resourceId, value = list of ResourceTag 1450 JpaPid resourceId; 1451 Collection<BaseTag> tagCol; 1452 for (ResourceHistoryTag tag : tagList) { 1453 1454 resourceId = tag.getResourcePid(); 1455 tagCol = tagMap.get(resourceId); 1456 if (tagCol == null) { 1457 tagCol = new ArrayList<>(); 1458 tagCol.add(tag); 1459 tagMap.put(resourceId, tagCol); 1460 } else { 1461 tagCol.add(tag); 1462 } 1463 } 1464 1465 return tagMap; 1466 } 1467 1468 @Nonnull 1469 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned( 1470 Collection<ResourceHistoryTable> theHistoryTables) { 1471 List<JpaPid> idList = new ArrayList<>(theHistoryTables.size()); 1472 1473 // -- find all resource has tags 1474 for (ResourceHistoryTable resource : theHistoryTables) { 1475 if (resource.isHasTags()) { 1476 idList.add(resource.getResourceId()); 1477 } 1478 } 1479 1480 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1481 1482 // -- no tags 1483 if (idList.isEmpty()) { 1484 return tagMap; 1485 } 1486 1487 // -- get all tags for the idList 1488 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList); 1489 1490 // -- build the map, key = resourceId, value = list of ResourceTag 1491 JpaPid resourceId; 1492 Collection<BaseTag> tagCol; 1493 for (ResourceTag tag : tagList) { 1494 1495 resourceId = tag.getResourceId(); 1496 tagCol = tagMap.get(resourceId); 1497 if (tagCol == null) { 1498 tagCol = new 
ArrayList<>(); 1499 tagCol.add(tag); 1500 tagMap.put(resourceId, tagCol); 1501 } else { 1502 tagCol.add(tag); 1503 } 1504 } 1505 1506 return tagMap; 1507 } 1508 1509 @Override 1510 public void loadResourcesByPid( 1511 Collection<JpaPid> thePids, 1512 Collection<JpaPid> theIncludedPids, 1513 List<IBaseResource> theResourceListToPopulate, 1514 boolean theForHistoryOperation, 1515 RequestDetails theRequestDetails) { 1516 if (thePids.isEmpty()) { 1517 ourLog.debug("The include pids are empty"); 1518 } 1519 1520 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1521 // when running asserts 1522 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1523 1524 Map<Long, Integer> position = new HashMap<>(); 1525 int index = 0; 1526 for (JpaPid next : thePids) { 1527 position.put(next.getId(), index++); 1528 } 1529 1530 // Can we fast track this loading by checking elastic search? 1531 boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids); 1532 if (isUsingElasticSearch) { 1533 try { 1534 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1535 return; 1536 1537 } catch (ResourceNotFoundInIndexException theE) { 1538 // some resources were not found in index, so we will inform this and resort to JPA search 1539 ourLog.warn( 1540 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1541 } 1542 } 1543 1544 // We only chunk because some jdbc drivers can't handle long param lists. 1545 QueryChunker.chunk( 1546 thePids, 1547 t -> doLoadPids( 1548 theRequestDetails, 1549 t, 1550 theIncludedPids, 1551 theResourceListToPopulate, 1552 theForHistoryOperation, 1553 position)); 1554 } 1555 1556 /** 1557 * Check if we can load the resources from Hibernate Search instead of the database. 1558 * We assume this is faster. 1559 * <p> 1560 * Hibernate Search only stores the current version, and only if enabled. 1561 * 1562 * @param thePids the pids to check for versioned references 1563 * @return can we fetch from Hibernate Search? 1564 */ 1565 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1566 // is storage enabled? 1567 return myStorageSettings.isStoreResourceInHSearchIndex() 1568 && myStorageSettings.isHibernateSearchIndexSearchParams() 1569 && 1570 // we don't support history 1571 thePids.stream().noneMatch(p -> p.getVersion() != null) 1572 && 1573 // skip the complexity for metadata in dstu2 1574 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1575 } 1576 1577 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1578 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1579 // only impl 1580 // to handle lastN? 1581 if (myStorageSettings.isHibernateSearchIndexSearchParams() 1582 && myStorageSettings.isStoreResourceInHSearchIndex()) { 1583 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1584 1585 return myFulltextSearchSvc.getResources(pidList); 1586 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1587 // legacy LastN implementation 1588 return myIElasticsearchSvc.getObservationResources(thePids); 1589 } else { 1590 return Collections.emptyList(); 1591 } 1592 } 1593 1594 /** 1595 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1596 * so it can't be Collections.emptySet() or some such thing. 
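	 * <p>
	 * Illustrative sketch of a caller (the names {@code searchBuilder}, {@code fhirContext},
	 * {@code entityManager}, {@code matchedPids} and {@code requestDetails} are assumed
	 * placeholders, not taken from this codebase), shown only to clarify the parameter order
	 * of the overload below:
	 * <pre>{@code
	 * Collection<Include> includes = List.of(new Include("MedicationRequest:medication"));
	 * Set<JpaPid> included = searchBuilder.loadIncludes(
	 * 		fhirContext,
	 * 		entityManager,
	 * 		matchedPids,      // the JpaPids already matched by the search
	 * 		includes,
	 * 		false,            // theReverseMode: false = _include, true = _revinclude
	 * 		null,             // no _lastUpdated filter
	 * 		"example-search", // only used in log output
	 * 		requestDetails,
	 * 		null);            // no maximum count
	 * }</pre>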
1597 * The JpaPid returned will have resource type populated. 1598 */ 1599 @Override 1600 public Set<JpaPid> loadIncludes( 1601 FhirContext theContext, 1602 EntityManager theEntityManager, 1603 Collection<JpaPid> theMatches, 1604 Collection<Include> theIncludes, 1605 boolean theReverseMode, 1606 DateRangeParam theLastUpdated, 1607 String theSearchIdOrDescription, 1608 RequestDetails theRequest, 1609 Integer theMaxCount) { 1610 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1611 parameters.setFhirContext(theContext); 1612 parameters.setEntityManager(theEntityManager); 1613 parameters.setMatches(theMatches); 1614 parameters.setIncludeFilters(theIncludes); 1615 parameters.setReverseMode(theReverseMode); 1616 parameters.setLastUpdated(theLastUpdated); 1617 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1618 parameters.setRequestDetails(theRequest); 1619 parameters.setMaxCount(theMaxCount); 1620 return loadIncludes(parameters); 1621 } 1622 1623 @Override 1624 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1625 Collection<JpaPid> matches = theParameters.getMatches(); 1626 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1627 boolean reverseMode = theParameters.isReverseMode(); 1628 EntityManager entityManager = theParameters.getEntityManager(); 1629 Integer maxCount = theParameters.getMaxCount(); 1630 FhirContext fhirContext = theParameters.getFhirContext(); 1631 RequestDetails request = theParameters.getRequestDetails(); 1632 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1633 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1634 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1635 IInterceptorBroadcaster compositeBroadcaster = 1636 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1637 1638 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1639 CurrentThreadCaptureQueriesListener.startCapturing(); 1640 } 1641 if (matches.isEmpty()) { 1642 return new HashSet<>(); 1643 } 1644 if (currentIncludes == null || currentIncludes.isEmpty()) { 1645 return new HashSet<>(); 1646 } 1647 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1648 String searchPartitionIdFieldName = 1649 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1650 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1651 String findPartitionIdFieldName = 1652 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1653 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1654 String findVersionFieldName = null; 1655 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1656 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1657 } 1658 1659 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1660 HashSet<JpaPid> allAdded = new HashSet<>(); 1661 HashSet<JpaPid> original = new HashSet<>(matches); 1662 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1663 1664 int roundCounts = 0; 1665 StopWatch w = new StopWatch(); 1666 1667 boolean addedSomeThisRound; 1668 do { 1669 roundCounts++; 1670 1671 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1672 1673 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1674 Include nextInclude = iter.next(); 1675 if (!nextInclude.isRecurse()) { 1676 iter.remove(); 1677 } 1678 1679 // Account for _include=* 1680 boolean matchAll = "*".equals(nextInclude.getValue()); 1681 1682 // Account for _include=[resourceType]:* 1683 String wantResourceType = null; 1684 if (!matchAll) { 1685 if ("*".equals(nextInclude.getParamName())) { 1686 wantResourceType = nextInclude.getParamType(); 1687 matchAll = true; 1688 } 1689 } 1690 1691 if (matchAll) { 1692 loadIncludesMatchAll( 1693 findPidFieldName, 1694 findPartitionIdFieldName, 1695 findResourceTypeFieldName, 1696 findVersionFieldName, 1697 searchPidFieldName, 1698 searchPartitionIdFieldName, 1699 wantResourceType, 1700 reverseMode, 1701 hasDesiredResourceTypes, 1702 nextRoundMatches, 1703 entityManager, 1704 maxCount, 1705 desiredResourceTypes, 1706 pidsToInclude, 1707 request); 1708 } else { 1709 loadIncludesMatchSpecific( 1710 nextInclude, 1711 fhirContext, 1712 findPidFieldName, 1713 findPartitionIdFieldName, 1714 findVersionFieldName, 1715 searchPidFieldName, 1716 reverseMode, 1717 nextRoundMatches, 1718 entityManager, 1719 maxCount, 1720 pidsToInclude, 1721 request); 1722 } 1723 } 1724 1725 nextRoundMatches.clear(); 1726 for (JpaPid next : pidsToInclude) { 1727 if (!original.contains(next) && !allAdded.contains(next)) { 1728 nextRoundMatches.add(next); 1729 } else { 1730 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1731 } 1732 } 1733 1734 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1735 1736 if (maxCount != null && allAdded.size() >= maxCount) { 1737 break; 1738 } 1739 1740 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1741 1742 allAdded.removeAll(original); 1743 1744 ourLog.info( 1745 "Loaded {} {} in {} rounds and {} ms for search {}", 1746 allAdded.size(), 1747 reverseMode ? 
"_revincludes" : "_includes", 1748 roundCounts, 1749 w.getMillisAndRestart(), 1750 searchIdOrDescription); 1751 1752 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1753 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1754 } 1755 1756 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1757 // This can be used to remove results from the search result details before 1758 // the user has a chance to know that they were in the results 1759 if (!allAdded.isEmpty()) { 1760 1761 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1762 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1763 JpaPreResourceAccessDetails accessDetails = 1764 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1765 HookParams params = new HookParams() 1766 .add(IPreResourceAccessDetails.class, accessDetails) 1767 .add(RequestDetails.class, request) 1768 .addIfMatchesType(ServletRequestDetails.class, request); 1769 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1770 1771 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1772 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1773 JpaPid value = includedPidList.remove(i); 1774 if (value != null) { 1775 allAdded.remove(value); 1776 } 1777 } 1778 } 1779 } 1780 } 1781 1782 return allAdded; 1783 } 1784 1785 private void loadIncludesMatchSpecific( 1786 Include nextInclude, 1787 FhirContext fhirContext, 1788 String findPidFieldName, 1789 String findPartitionFieldName, 1790 String findVersionFieldName, 1791 String searchPidFieldName, 1792 boolean reverseMode, 1793 List<JpaPid> nextRoundMatches, 1794 EntityManager entityManager, 1795 Integer maxCount, 1796 HashSet<JpaPid> pidsToInclude, 1797 RequestDetails theRequest) { 1798 List<String> paths; 1799 1800 // Start replace 1801 RuntimeSearchParam param; 1802 String resType = nextInclude.getParamType(); 1803 if (isBlank(resType)) { 1804 return; 1805 } 1806 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1807 if (def == null) { 1808 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1809 return; 1810 } 1811 1812 String paramName = nextInclude.getParamName(); 1813 if (isNotBlank(paramName)) { 1814 param = mySearchParamRegistry.getActiveSearchParam( 1815 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1816 } else { 1817 param = null; 1818 } 1819 if (param == null) { 1820 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1821 return; 1822 } 1823 1824 paths = param.getPathsSplitForResourceType(resType); 1825 // end replace 1826 1827 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1828 1829 for (String nextPath : paths) { 1830 String findPidFieldSqlColumn = 1831 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1832 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1833 if (findVersionFieldName != null) { 1834 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1835 } 1836 if (myPartitionSettings.isDatabasePartitionMode()) { 1837 fieldsToLoad += ", r."; 1838 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1839 ? 
"partition_id" 1840 : "target_res_partition_id"; 1841 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1842 } 1843 1844 // Query for includes lookup has 2 cases 1845 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1846 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1847 // url in target_resource_url 1848 1849 // Case 1: 1850 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1851 1852 String searchPidFieldSqlColumn = 1853 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1854 StringBuilder localReferenceQuery = new StringBuilder(); 1855 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1856 localReferenceQuery.append(" FROM hfj_res_link r "); 1857 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1858 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1859 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1860 } 1861 localReferenceQuery 1862 .append(" AND r.") 1863 .append(searchPidFieldSqlColumn) 1864 .append(" IN (:target_pids) "); 1865 if (myPartitionSettings.isDatabasePartitionMode()) { 1866 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1867 ? "target_res_partition_id" 1868 : "partition_id"; 1869 localReferenceQuery 1870 .append("AND r.") 1871 .append(partitionFieldToSearch) 1872 .append(" = :search_partition_id "); 1873 } 1874 localReferenceQueryParams.put("src_path", nextPath); 1875 // we loop over target_pids later. 1876 if (targetResourceTypes != null) { 1877 if (targetResourceTypes.size() == 1) { 1878 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1879 localReferenceQueryParams.put( 1880 "target_resource_type", 1881 targetResourceTypes.iterator().next()); 1882 } else { 1883 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1884 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1885 } 1886 } 1887 1888 // Case 2: 1889 Pair<String, Map<String, Object>> canonicalQuery = 1890 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1891 1892 String sql = localReferenceQuery.toString(); 1893 if (canonicalQuery != null) { 1894 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1895 } 1896 1897 Map<String, Object> limitParams = new HashMap<>(); 1898 if (maxCount != null) { 1899 LinkedList<Object> bindVariables = new LinkedList<>(); 1900 sql = SearchQueryBuilder.applyLimitToSql( 1901 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1902 1903 // The dialect SQL limiter uses positional params, but we're using 1904 // named params here, so we need to replace the positional params 1905 // with equivalent named ones 1906 StringBuilder sb = new StringBuilder(); 1907 for (int i = 0; i < sql.length(); i++) { 1908 char nextChar = sql.charAt(i); 1909 if (nextChar == '?') { 1910 String nextName = "limit" + i; 1911 sb.append(':').append(nextName); 1912 limitParams.put(nextName, bindVariables.removeFirst()); 1913 } else { 1914 sb.append(nextChar); 1915 } 1916 } 1917 sql = sb.toString(); 1918 } 1919 1920 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1921 for (Collection<JpaPid> nextPartition : partitions) { 1922 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1923 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1924 if (myPartitionSettings.isDatabasePartitionMode()) { 1925 q.setParameter( 1926 "search_partition_id", 1927 nextPartition.iterator().next().getPartitionId()); 1928 } 1929 localReferenceQueryParams.forEach(q::setParameter); 1930 if (canonicalQuery != null) { 1931 canonicalQuery.getRight().forEach(q::setParameter); 1932 } 1933 limitParams.forEach(q::setParameter); 1934 1935 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1936 Tuple result; 1937 while (iter.hasNext()) { 1938 result = iter.next(); 1939 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1940 Long resourceVersion = null; 1941 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1942 resourceVersion = 1943 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1944 } 1945 Integer partitionId = null; 1946 if (myPartitionSettings.isDatabasePartitionMode()) { 1947 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1948 } 1949 1950 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1951 pid.setPartitionId(partitionId); 1952 pidsToInclude.add(pid); 1953 } 1954 } 1955 // myEntityManager.clear(); 1956 } 1957 } 1958 } 1959 1960 private void loadIncludesMatchAll( 1961 String findPidFieldName, 1962 String findPartitionFieldName, 1963 String findResourceTypeFieldName, 1964 String findVersionFieldName, 1965 String searchPidFieldName, 1966 String searchPartitionFieldName, 1967 String wantResourceType, 1968 boolean reverseMode, 1969 boolean hasDesiredResourceTypes, 1970 List<JpaPid> nextRoundMatches, 1971 EntityManager entityManager, 1972 Integer maxCount, 1973 List<String> desiredResourceTypes, 1974 HashSet<JpaPid> pidsToInclude, 1975 RequestDetails request) { 1976 1977 record IncludesRecord( 1978 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1979 1980 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1981 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1982 Root<ResourceLink> root = query.from(ResourceLink.class); 1983 1984 List<Selection<?>> selectionList = new ArrayList<>(); 1985 selectionList.add(root.get(findPidFieldName)); 1986 selectionList.add(root.get(findResourceTypeFieldName)); 1987 selectionList.add(root.get("myTargetResourceUrl")); 1988 if (findVersionFieldName != null) { 1989 selectionList.add(root.get(findVersionFieldName)); 1990 } else { 1991 selectionList.add(cb.nullLiteral(Long.class)); 1992 } 1993 if (myPartitionSettings.isDatabasePartitionMode()) { 1994 selectionList.add(root.get(findPartitionFieldName)); 1995 } else { 1996 selectionList.add(cb.nullLiteral(Integer.class)); 1997 } 1998 query.multiselect(selectionList); 1999 2000 List<Predicate> predicates = new ArrayList<>(); 2001 2002 if (myPartitionSettings.isDatabasePartitionMode()) { 2003 predicates.add( 2004 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 2005 } 2006 2007 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 2008 2009 /* 2010 * We need to set the resource type in 2 cases only: 2011 * 1) we are in $everything mode 2012 * (where we only want to fetch specific resource types, regardless of what is 2013 * available to fetch) 2014 * 2) we are doing revincludes 2015 * 2016 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 2017 * should always be checking the source resource type on the resource link. We don't 2018 * actually index that column though by default, so in order to try and be efficient 2019 * we don't actually include it for includes (but we do for revincludes). This is 2020 * because for an include, it doesn't really make sense to include a different 2021 * resource type than the one you are searching on. 2022 */ 2023 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 2024 // because mySourceResourceType is not part of the HFJ_RES_LINK 2025 // index, this might not be the most optimal performance. 2026 // but it is for an $everything operation (and maybe we should update the index) 2027 predicates.add( 2028 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 2029 } else { 2030 wantResourceType = null; 2031 } 2032 2033 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 2034 // resources 2035 // (e.g. via Provenance, List, or Group) when in an $everything operation 2036 if (myParams != null 2037 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 2038 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 2039 predicates.add(cb.not(root.get("mySourceResourceType") 2040 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 2041 } 2042 2043 if (hasDesiredResourceTypes) { 2044 predicates.add( 2045 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 2046 } 2047 2048 query.where(cb.and(predicates.toArray(new Predicate[0]))); 2049 2050 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 2051 for (Collection<JpaPid> nextPartition : partitions) { 2052 2053 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 2054 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 2055 if (myPartitionSettings.isDatabasePartitionMode()) { 2056 q.setParameter( 2057 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 2058 } 2059 if (wantResourceType != null) { 2060 q.setParameter("want_resource_type", wantResourceType); 2061 } 2062 if (maxCount != null) { 2063 q.setMaxResults(maxCount); 2064 } 2065 if (hasDesiredResourceTypes) { 2066 q.setParameter("desired_target_resource_types", desiredResourceTypes); 2067 } 2068 2069 Set<String> canonicalUrls = null; 2070 2071 try (ScrollableResultsIterator<IncludesRecord> iter = 2072 new ScrollableResultsIterator<>(toScrollableResults(q))) { 2073 IncludesRecord nextRow; 2074 while (iter.hasNext()) { 2075 nextRow = iter.next(); 2076 if (nextRow == null) { 2077 // This can happen if there are outgoing references which are canonical or point to 2078 // other servers 2079 continue; 2080 } 2081 2082 Long version = nextRow.version; 2083 Long resourceId = nextRow.resourceId; 2084 String resourceType = nextRow.resourceType; 2085 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 2086 Integer partitionId = nextRow.partitionId; 2087 2088 if (resourceId != null) { 2089 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 2090 pid.setPartitionId(partitionId); 2091 pidsToInclude.add(pid); 2092 } else if (resourceCanonicalUrl != null) { 2093 if (canonicalUrls == null) { 2094 canonicalUrls = new HashSet<>(); 2095 } 2096 canonicalUrls.add(resourceCanonicalUrl); 2097 } 
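						// Note (illustrative example): a row lands in canonicalUrls rather than pidsToInclude
						// when the link was stored as a canonical reference, e.g. QuestionnaireResponse.questionnaire
						// holding "http://example.org/Questionnaire/intake". Such rows carry no target resource id,
						// so the URL is collected here and resolved against the URI index in loadCanonicalUrls(..)
						// once the scroll over this partition completes.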
2098				}
2099			}
2100
2101			if (canonicalUrls != null) {
2102				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
2103			}
2104		}
2105	}
2106
2107	private void loadCanonicalUrls(
2108			RequestDetails theRequestDetails,
2109			Set<String> theCanonicalUrls,
2110			EntityManager theEntityManager,
2111			HashSet<JpaPid> thePidsToInclude,
2112			boolean theReverse) {
2113		StringBuilder sqlBuilder;
2114		CanonicalUrlTargets canonicalUrlTargets =
2115				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
2116		if (canonicalUrlTargets.isEmpty()) {
2117			return;
2118		}
2119
2120		String message =
2121				"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
2122		firePerformanceWarning(theRequestDetails, message);
2123
2124		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
2125				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());
2126
2127		sqlBuilder = new StringBuilder();
2128		sqlBuilder.append("SELECT ");
2129		if (myPartitionSettings.isPartitioningEnabled()) {
2130			sqlBuilder.append("i.myPartitionIdValue, ");
2131		}
2132		sqlBuilder.append("i.myResourcePid ");
2133
2134		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
2135		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
2136		sqlBuilder.append("AND i.myUri IN (:uris)");
2137
2138		String canonicalResSql = sqlBuilder.toString();
2139
2140		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
2141			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
2142			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
2143			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
2144			List<Object[]> results = canonicalResIdQuery.getResultList();
2145			for (var next : results) {
2146				if (next != null) {
2147					Integer partitionId = null;
2148					Long pid;
2149					if (next.length == 1) {
2150						pid = (Long) next[0];
2151					} else {
2152						partitionId = (Integer) ((Object[]) next)[0];
2153						pid = (Long) ((Object[]) next)[1];
2154					}
2155					if (pid != null) {
2156						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
2157					}
2158				}
2159			}
2160		}
2161	}
2162
2163	/**
2164	 * Calls the performance trace hook, sending the raw SQL queries captured on the
2165	 * current thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut.
2166	 *
2167	 * @param request the request details
2168	 */
2169	private void callRawSqlHookWithCurrentThreadQueries(
2170			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
2171		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
2172		HookParams params = new HookParams()
2173				.add(RequestDetails.class, request)
2174				.addIfMatchesType(ServletRequestDetails.class, request)
2175				.add(SqlQueryList.class, capturedQueries);
2176		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
2177	}
2178
2179	@Nullable
2180	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
2181		String targetResourceType = nextInclude.getParamTargetType();
2182		boolean haveTargetTypesDefinedByParam = param.hasTargets();
2183		Set<String> targetResourceTypes;
2184		if (targetResourceType != null) {
2185			targetResourceTypes = Set.of(targetResourceType);
2186		} else if (haveTargetTypesDefinedByParam) {
2187			targetResourceTypes = param.getTargets();
2188		} else {
2189			// all types!
2190 targetResourceTypes = null; 2191 } 2192 return targetResourceTypes; 2193 } 2194 2195 @Nullable 2196 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2197 String theVersionFieldName, 2198 Set<String> theTargetResourceTypes, 2199 boolean theReverse, 2200 RequestDetails theRequest, 2201 RuntimeSearchParam theParam) { 2202 2203 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2204 2205 // If we know for sure that none of the paths involved in this SearchParameter could 2206 // be indexing a canonical 2207 if (Arrays.stream(searchParameterPaths) 2208 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2209 myContext, myResourceName, t, theReverse))) { 2210 return null; 2211 } 2212 2213 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2214 if (theVersionFieldName != null) { 2215 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2216 fieldsToLoadFromSpidxUriTable += ", NULL"; 2217 } 2218 2219 if (myPartitionSettings.isDatabasePartitionMode()) { 2220 if (theReverse) { 2221 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2222 } else { 2223 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2224 } 2225 } 2226 2227 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2228 // But sp_name isn't indexed, so we use hash_identity instead. 2229 CanonicalUrlTargets canonicalUrlTargets = 2230 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2231 if (canonicalUrlTargets.isEmpty()) { 2232 return null; 2233 } 2234 2235 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2236 StringBuilder canonicalUrlQuery = new StringBuilder(); 2237 canonicalUrlQuery 2238 .append("SELECT ") 2239 .append(fieldsToLoadFromSpidxUriTable) 2240 .append(' '); 2241 canonicalUrlQuery.append("FROM hfj_res_link r "); 2242 2243 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2244 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2245 if (myPartitionSettings.isDatabasePartitionMode()) { 2246 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2247 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2248 } 2249 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2250 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2251 canonicalUriQueryParams.put( 2252 "uri_identity_hash", 2253 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2254 } else { 2255 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2256 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2257 } 2258 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2259 canonicalUrlQuery.append(")"); 2260 2261 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2262 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2263 canonicalUrlQuery.append(" AND"); 2264 if (myPartitionSettings.isDatabasePartitionMode()) { 2265 if (theReverse) { 2266 canonicalUrlQuery.append(" rUri.partition_id"); 2267 } else { 2268 canonicalUrlQuery.append(" r.partition_id"); 2269 } 2270 canonicalUrlQuery.append(" = :search_partition_id"); 2271 canonicalUrlQuery.append(" AND"); 2272 } 2273 if (theReverse) { 2274 
canonicalUrlQuery.append(" rUri.res_id"); 2275 } else { 2276 canonicalUrlQuery.append(" r.src_resource_id"); 2277 } 2278 canonicalUrlQuery.append(" IN (:target_pids)"); 2279 2280 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2281 } 2282 2283 @Nonnull 2284 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2285 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2286 Set<String> targetResourceTypes = theTargetResourceTypes; 2287 if (targetResourceTypes == null) { 2288 /* 2289 * If we don't have a list of valid target types, we need to figure out a list of all 2290 * possible target types in order to perform the search of the URI index table. This is 2291 * because the hash_identity column encodes the resource type, so we'll need a hash 2292 * value for each possible target type. 2293 */ 2294 targetResourceTypes = new HashSet<>(); 2295 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2296 if (theReverse) { 2297 // For reverse includes, it is really hard to figure out what types 2298 // are actually potentially pointing to the type we're searching for 2299 // in this context, so let's just assume it could be anything. 2300 targetResourceTypes = possibleTypes; 2301 } else { 2302 List<RuntimeSearchParam> params = mySearchParamRegistry 2303 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2304 .values() 2305 .stream() 2306 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2307 .toList(); 2308 for (var next : params) { 2309 2310 String paths = next.getPath(); 2311 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2312 2313 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2314 myContext, myResourceName, path, theReverse)) { 2315 continue; 2316 } 2317 2318 if (!next.getTargets().isEmpty()) { 2319 // For each reference parameter on the resource type we're searching for, 2320 // add all the potential target types to the list of possible target 2321 // resource types we can look up. 
2322 for (var nextTarget : next.getTargets()) { 2323 if (possibleTypes.contains(nextTarget)) { 2324 targetResourceTypes.add(nextTarget); 2325 } 2326 } 2327 } else { 2328 // If we have any references that don't define any target types, then 2329 // we need to assume that all enabled resource types are possible target 2330 // types 2331 targetResourceTypes.addAll(possibleTypes); 2332 break; 2333 } 2334 } 2335 } 2336 } 2337 } 2338 2339 if (targetResourceTypes.isEmpty()) { 2340 return new CanonicalUrlTargets(Set.of(), Set.of()); 2341 } 2342 2343 Set<Long> hashIdentityValues = new HashSet<>(); 2344 Set<Integer> partitionIds = new HashSet<>(); 2345 for (String type : targetResourceTypes) { 2346 2347 RequestPartitionId readPartition; 2348 if (myPartitionSettings.isPartitioningEnabled()) { 2349 readPartition = 2350 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2351 } else { 2352 readPartition = RequestPartitionId.defaultPartition(); 2353 } 2354 if (readPartition.hasPartitionIds()) { 2355 partitionIds.addAll(readPartition.getPartitionIds()); 2356 } 2357 2358 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2359 myPartitionSettings, readPartition, type, "url"); 2360 hashIdentityValues.add(hashIdentity); 2361 } 2362 2363 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2364 } 2365 2366 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2367 public boolean isEmpty() { 2368 return hashIdentityValues.isEmpty(); 2369 } 2370 } 2371 2372 /** 2373 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2374 * those pids where: 2375 * <ul> 2376 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2377 * <li>Each list only contains JpaPids with the same partition ID</li> 2378 * </ul> 2379 */ 2380 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2381 2382 if (theNextRoundMatches.size() <= theMaxLoad) { 2383 boolean allSamePartition = true; 2384 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2385 if (!Objects.equals( 2386 theNextRoundMatches.get(i - 1).getPartitionId(), 2387 theNextRoundMatches.get(i).getPartitionId())) { 2388 allSamePartition = false; 2389 break; 2390 } 2391 } 2392 if (allSamePartition) { 2393 return Collections.singletonList(theNextRoundMatches); 2394 } 2395 } 2396 2397 // Break into partitioned sublists 2398 ListMultimap<String, JpaPid> lists = 2399 MultimapBuilder.hashKeys().arrayListValues().build(); 2400 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2401 String partitionId = nextRoundMatch.getPartitionId() != null 2402 ? 
nextRoundMatch.getPartitionId().toString() 2403 : ""; 2404 lists.put(partitionId, nextRoundMatch); 2405 } 2406 2407 List<Collection<JpaPid>> retVal = new ArrayList<>(); 2408 for (String key : lists.keySet()) { 2409 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad); 2410 retVal.addAll(nextPartition); 2411 } 2412 2413 // In unit test mode, we sort the results just for unit test predictability 2414 if (HapiSystemProperties.isUnitTestModeEnabled()) { 2415 retVal = retVal.stream() 2416 .map(t -> t.stream().sorted().collect(Collectors.toList())) 2417 .collect(Collectors.toList()); 2418 } 2419 2420 return retVal; 2421 } 2422 2423 /** 2424 * If any Combo SearchParameters match the given query parameters, add a predicate 2425 * to {@literal theQueryStack} and remove the parameters from {@literal theParams}. 2426 * This method handles both UNIQUE and NON_UNIQUE combo parameters. 2427 */ 2428 private void attemptComboSearchParameterProcessing( 2429 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 2430 2431 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 2432 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2433 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2434 2435 List<JpaParamUtil.ComponentAndCorrespondingParam> nextCandidateComponents = 2436 JpaParamUtil.resolveCompositeComponents(mySearchParamRegistry, nextCandidate); 2437 2438 /* 2439 * First, a quick and dirty check to see if we have a parameter in the current search 2440 * that contains all the parameters for the candidate combo search parameter. We do 2441 * a more nuanced check later to make sure that the parameters have appropriate values, 2442 * modifiers, etc. so this doesn't need to be perfect in terms of rejecting bad matches. 2443 * It just needs to fail fast if the search couldn't possibly be a match for the 2444 * candidate so we can move on quickly. 2445 */ 2446 boolean noMatch = false; 2447 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : nextCandidateComponents) { 2448 if (!theParams.containsKey(nextComponent.getParamName()) 2449 && !theParams.containsKey(nextComponent.getCombinedParamName())) { 2450 noMatch = true; 2451 break; 2452 } 2453 } 2454 if (noMatch) { 2455 continue; 2456 } 2457 2458 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : nextCandidateComponents) { 2459 ensureSubListsAreWritable(theParams.get(nextComponent.getParamName())); 2460 ensureSubListsAreWritable(theParams.get(nextComponent.getCombinedParamName())); 2461 } 2462 2463 /* 2464 * Apply search against the combo param index in a loop: 2465 * 2466 * 1. First we check whether the actual parameter values in the 2467 * parameter map are actually usable for searching against the combo 2468 * param index. E.g. no search modifiers, date comparators, etc., 2469 * since these mean you can't use the combo index. 2470 * 2471 * 2. Apply and create the join SQl. We remove parameter values from 2472 * the map as we apply them, so any parameter values remaining in the 2473 * map after each loop haven't yet been factored into the SQL. 2474 * 2475 * The loop allows us to create multiple combo index joins if there 2476 * are multiple AND expressions for the related parameters. 
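		 *
		 * Illustrative (hypothetical) example: if a NON_UNIQUE combo parameter covers code and
		 * date on Observation, a request such as
		 *   Observation?code=1234-5&date=2021-03-01&code=9876-1&date=2021-03-02
		 * supplies two AND expressions per component, so the loop below can apply the combo
		 * index twice, consuming one code expression and one date expression on each pass.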
2477 */ 2478 boolean matched; 2479 do { 2480 matched = applyComboSearchParamIfAppropriate( 2481 theRequest, theQueryStack, theParams, nextCandidate, nextCandidateComponents); 2482 } while (matched); 2483 } 2484 } 2485 2486 /** 2487 * Attempts to apply a Combo SearchParameter to the current search. Assuming some or all parameters of 2488 * the search are appropriate for the given Combo SearchParameter, a predicate is created and added to 2489 * the QueryStack, and the parameters are removed from the search parameters map. 2490 * 2491 * @param theRequest The RequestDetails for the current search. 2492 * @param theQueryStack The current SQL builder QueryStack to add a predicate to. 2493 * @param theParams The search parameters for the current search. 2494 * @param theComboParam The Combo SearchParameter to apply. 2495 * @param theComboParamComponents The components of the Combo SearchParameter. 2496 * @return Returns <code>true</code> if the Combo SearchParameter was applied successfully. 2497 */ 2498 private boolean applyComboSearchParamIfAppropriate( 2499 RequestDetails theRequest, 2500 QueryStack theQueryStack, 2501 @Nonnull SearchParameterMap theParams, 2502 RuntimeSearchParam theComboParam, 2503 List<JpaParamUtil.ComponentAndCorrespondingParam> theComboParamComponents) { 2504 2505 List<List<IQueryParameterType>> inputs = new ArrayList<>(theComboParamComponents.size()); 2506 List<Runnable> searchParameterConsumerTasks = new ArrayList<>(theComboParamComponents.size()); 2507 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : theComboParamComponents) { 2508 boolean foundMatch = false; 2509 2510 /* 2511 * The following List<List<IQueryParameterType>> is a list of query parameters where the 2512 * outer list contains AND combinations, and the inner lists contain OR combinations. 2513 * For each component in the Combo SearchParameter, we need to find a list of OR parameters 2514 * (i.e. the inner List) which is appropriate for the given component. 2515 * 2516 * We can only use a combo param when the query parameter is fairly basic 2517 * (no modifiers such as :missing or :below, references are qualified with 2518 * a resource type, etc.) Once we've confirmed that we have a parameter for 2519 * each component, we remove the components from the source SearchParameterMap 2520 * since we're going to consume them and add a predicate to the SQL builder. 
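		 *
		 * For example (illustrative values only), a request of
		 *   Patient?family=smith,jones&family=watson
		 * arrives here as an outer AND list containing two inner OR lists:
		 *   [ [smith, jones], [watson] ]
		 * and the code below claims exactly one of those inner OR lists per combo component.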
2521 */ 2522 List<List<IQueryParameterType>> sameNameParametersAndList = theParams.get(nextComponent.getParamName()); 2523 if (sameNameParametersAndList != null) { 2524 boolean parameterIsChained = false; 2525 for (int andIndex = 0; andIndex < sameNameParametersAndList.size(); andIndex++) { 2526 List<IQueryParameterType> sameNameParametersOrList = sameNameParametersAndList.get(andIndex); 2527 IQueryParameterType firstValue = sameNameParametersOrList.get(0); 2528 2529 if (firstValue instanceof ReferenceParam refParam) { 2530 if (!Objects.equals(nextComponent.getChain(), refParam.getChain())) { 2531 continue; 2532 } 2533 } 2534 2535 if (!validateParamValuesAreValidForComboParam( 2536 theRequest, theParams, theComboParam, nextComponent, sameNameParametersOrList)) { 2537 continue; 2538 } 2539 2540 inputs.add(sameNameParametersOrList); 2541 searchParameterConsumerTasks.add(() -> sameNameParametersAndList.remove(sameNameParametersOrList)); 2542 foundMatch = true; 2543 break; 2544 } 2545 } else if (!nextComponent.getParamName().equals(nextComponent.getCombinedParamName())) { 2546 2547 /* 2548 * If we didn't find any parameters for the parameter name (e.g. "patient") and 2549 * we're looking for a chained parameter (e.g. "patient.identifier"), check if 2550 * there are any matches for the full combined parameter name 2551 * (e.g. "patient.identifier"). 2552 */ 2553 List<List<IQueryParameterType>> combinedNameParametersAndList = 2554 theParams.get(nextComponent.getCombinedParamName()); 2555 if (combinedNameParametersAndList != null) { 2556 for (int andIndex = 0; andIndex < combinedNameParametersAndList.size(); andIndex++) { 2557 List<IQueryParameterType> combinedNameParametersOrList = 2558 combinedNameParametersAndList.get(andIndex); 2559 if (!combinedNameParametersOrList.isEmpty()) { 2560 2561 if (!validateParamValuesAreValidForComboParam( 2562 theRequest, 2563 theParams, 2564 theComboParam, 2565 nextComponent, 2566 combinedNameParametersOrList)) { 2567 continue; 2568 } 2569 2570 inputs.add(combinedNameParametersOrList); 2571 searchParameterConsumerTasks.add( 2572 () -> combinedNameParametersAndList.remove(combinedNameParametersOrList)); 2573 foundMatch = true; 2574 break; 2575 } 2576 } 2577 } 2578 } 2579 2580 if (!foundMatch) { 2581 return false; 2582 } 2583 } 2584 2585 if (CartesianProductUtil.calculateCartesianProductSize(inputs) > 500) { 2586 ourLog.debug( 2587 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2588 return false; 2589 } 2590 2591 searchParameterConsumerTasks.forEach(Runnable::run); 2592 2593 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2594 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2595 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2596 2597 List<String> parameters = new ArrayList<>(); 2598 for (int paramIndex = 0; paramIndex < theComboParamComponents.size(); paramIndex++) { 2599 2600 JpaParamUtil.ComponentAndCorrespondingParam componentAndCorrespondingParam = 2601 theComboParamComponents.get(paramIndex); 2602 String nextParamName = componentAndCorrespondingParam.getCombinedParamName(); 2603 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2604 2605 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2606 // As a result, we strip the prefix if present. 
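						// e.g. a value submitted as "eq1990-01-01" is reduced to "1990-01-01" before being
						// baked into the combo index string.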
2607 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(), EQUAL.getValue()); 2608 2609 RestSearchParameterTypeEnum paramType = JpaParamUtil.getParameterTypeForComposite( 2610 mySearchParamRegistry, componentAndCorrespondingParam); 2611 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2612 if (paramType == RestSearchParameterTypeEnum.STRING) { 2613 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2614 } 2615 } 2616 2617 if (paramType == RestSearchParameterTypeEnum.TOKEN) { 2618 2619 /* 2620 * The gender SP indexes a fixed binding ValueSet with a single CodeSystem, so we 2621 * infer the codesystem just to be friendly to clients who don't provide it 2622 * in the search. 2623 */ 2624 if ("gender".equals(componentAndCorrespondingParam.getParamName()) 2625 || "gender".equals(componentAndCorrespondingParam.getChain())) { 2626 if (!nextOrValue.contains("|")) { 2627 nextOrValue = "http://hl7.org/fhir/administrative-gender|" + nextOrValue; 2628 } 2629 } 2630 } 2631 2632 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2633 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2634 2635 parameters.add(nextParamName + "=" + nextOrValue); 2636 } 2637 2638 // Make sure the parameters end up in the search URL in the same order 2639 // we would index them in (we also alphabetically sort when we create 2640 // the index rows) 2641 Collections.sort(parameters); 2642 2643 StringBuilder searchStringBuilder = new StringBuilder(); 2644 searchStringBuilder.append(myResourceName); 2645 for (int i = 0; i < parameters.size(); i++) { 2646 if (i == 0) { 2647 searchStringBuilder.append("?"); 2648 } else { 2649 searchStringBuilder.append("&"); 2650 } 2651 searchStringBuilder.append(parameters.get(i)); 2652 } 2653 2654 String indexString = searchStringBuilder.toString(); 2655 ourLog.debug( 2656 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2657 2658 indexStrings.add(indexString); 2659 } 2660 2661 // Just to make sure we're stable for tests 2662 indexStrings.sort(Comparator.naturalOrder()); 2663 2664 // Interceptor broadcast: JPA_PERFTRACE_INFO 2665 IInterceptorBroadcaster compositeBroadcaster = 2666 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2667 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2668 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2669 StorageProcessingMessage msg = new StorageProcessingMessage() 2670 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2671 + indexStringForLog); 2672 HookParams params = new HookParams() 2673 .add(RequestDetails.class, theRequest) 2674 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2675 .add(StorageProcessingMessage.class, msg); 2676 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2677 } 2678 2679 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2680 case UNIQUE: 2681 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2682 break; 2683 case NON_UNIQUE: 2684 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2685 break; 2686 } 2687 2688 // Remove any empty parameters remaining after this 2689 theParams.clean(); 2690 2691 return true; 2692 } 2693 2694 /** 2695 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2696 * searching against a combo param with the given parameter names. This might be {@literal false} if 2697 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2698 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2699 */ 2700 private boolean validateParamValuesAreValidForComboParam( 2701 RequestDetails theRequest, 2702 @Nonnull SearchParameterMap theParams, 2703 RuntimeSearchParam theComboParam, 2704 JpaParamUtil.ComponentAndCorrespondingParam theComboComponent, 2705 List<IQueryParameterType> theValues) { 2706 2707 for (IQueryParameterType nextOrValue : theValues) { 2708 if (nextOrValue instanceof DateParam dateParam) { 2709 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2710 String message = "Search with params " + describeParams(theParams) 2711 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2712 + theComboComponent.getCombinedParamName() + "'"; 2713 firePerformanceInfo(theRequest, message); 2714 return false; 2715 } 2716 } 2717 2718 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2719 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2720 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2721 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
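				// e.g. birthdate=gt2025-01-01 or birthdate=le2025-01-01 cannot be answered from the
				// combo index, so such parameters are left to the regular search parameter indexes.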
2722 if (prefix != null && prefix != EQUAL) { 2723 String message = "Search with params " + describeParams(theParams) 2724 + " is not a candidate for combo searching - Parameter '" 2725 + theComboComponent.getCombinedParamName() 2726 + "' has prefix: '" 2727 + paramWithPrefix.getPrefix().getValue() + "'"; 2728 firePerformanceInfo(theRequest, message); 2729 return false; 2730 } 2731 } 2732 2733 // Reference params are only eligible for using a composite index if they 2734 // are qualified 2735 boolean haveChain = false; 2736 if (nextOrValue instanceof ReferenceParam refParam) { 2737 haveChain = refParam.hasChain(); 2738 if (theComboComponent.getChain() == null && isBlank(refParam.getResourceType())) { 2739 String message = 2740 "Search is not a candidate for unique combo searching - Reference with no type specified for parameter '" 2741 + theComboComponent.getCombinedParamName() + "'"; 2742 firePerformanceInfo(theRequest, message); 2743 return false; 2744 } 2745 } 2746 2747 // Qualifiers such as :missing can't be resolved by a combo param 2748 if (!haveChain && isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2749 String message = "Search with params " + describeParams(theParams) 2750 + " is not a candidate for combo searching - Parameter '" 2751 + theComboComponent.getCombinedParamName() 2752 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2753 firePerformanceInfo(theRequest, message); 2754 return false; 2755 } 2756 2757 // Date params are not eligible for using composite unique index 2758 // as index could contain date with different precision (e.g. DAY, SECOND) 2759 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2760 if (nextOrValue instanceof DateParam) { 2761 ourLog.debug( 2762 "Search with params {} is not a candidate for combo searching - " 2763 + "Unique combo search parameter '{}' has DATE type", 2764 describeParams(theParams), 2765 theComboComponent); 2766 return false; 2767 } 2768 } 2769 } 2770 2771 return true; 2772 } 2773 2774 @Nonnull 2775 private static String describeParams(@Nonnull SearchParameterMap theParams) { 2776 return '[' + theParams.keySet().stream().sorted().collect(Collectors.joining(", ")) + ']'; 2777 } 2778 2779 private <T> void ensureSubListsAreWritable(@Nullable List<List<T>> theListOfLists) { 2780 if (theListOfLists != null) { 2781 for (int i = 0; i < theListOfLists.size(); i++) { 2782 List<T> oldSubList = theListOfLists.get(i); 2783 if (!(oldSubList instanceof ArrayList)) { 2784 List<T> newSubList = new ArrayList<>(oldSubList); 2785 theListOfLists.set(i, newSubList); 2786 } 2787 } 2788 } 2789 } 2790 2791 @Override 2792 public void setFetchSize(int theFetchSize) { 2793 myFetchSize = theFetchSize; 2794 } 2795 2796 public SearchParameterMap getParams() { 2797 return myParams; 2798 } 2799 2800 public CriteriaBuilder getBuilder() { 2801 return myCriteriaBuilder; 2802 } 2803 2804 public Class<? 
extends IBaseResource> getResourceType() { 2805 return myResourceType; 2806 } 2807 2808 public String getResourceName() { 2809 return myResourceName; 2810 } 2811 2812 /** 2813 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2814 */ 2815 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2816 2817 private final RequestDetails myRequest; 2818 private final Set<JpaPid> myCurrentPids; 2819 private Iterator<JpaPid> myCurrentIterator; 2820 private JpaPid myNext; 2821 2822 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2823 myCurrentPids = new HashSet<>(thePidSet); 2824 myCurrentIterator = null; 2825 myRequest = theRequest; 2826 } 2827 2828 private void fetchNext() { 2829 while (myNext == null) { 2830 2831 if (myCurrentIterator == null) { 2832 Set<Include> includes = new HashSet<>(); 2833 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2834 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2835 for (IQueryParameterType type : typeList) { 2836 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken()); 2837 for (String resourceType : queryString.split(",")) { 2838 String rt = resourceType.trim(); 2839 if (isNotBlank(rt)) { 2840 includes.add(new Include(rt + ":*", true)); 2841 } 2842 } 2843 } 2844 } 2845 } 2846 if (includes.isEmpty()) { 2847 includes.add(new Include("*", true)); 2848 } 2849 Set<JpaPid> newPids = loadIncludes( 2850 myContext, 2851 myEntityManager, 2852 myCurrentPids, 2853 includes, 2854 false, 2855 getParams().getLastUpdated(), 2856 mySearchUuid, 2857 myRequest, 2858 null); 2859 myCurrentIterator = newPids.iterator(); 2860 } 2861 2862 if (myCurrentIterator.hasNext()) { 2863 myNext = myCurrentIterator.next(); 2864 } else { 2865 myNext = NO_MORE; 2866 } 2867 } 2868 } 2869 2870 @Override 2871 public boolean hasNext() { 2872 fetchNext(); 2873 return !NO_MORE.equals(myNext); 2874 } 2875 2876 @Override 2877 public JpaPid next() { 2878 fetchNext(); 2879 JpaPid retVal = myNext; 2880 myNext = null; 2881 return retVal; 2882 } 2883 } 2884 /** 2885 * Basic Query iterator, used to fetch the results of a query. 2886 */ 2887 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2888 2889 private final SearchRuntimeDetails mySearchRuntimeDetails; 2890 2891 private final RequestDetails myRequest; 2892 private final boolean myHaveRawSqlHooks; 2893 private final boolean myHavePerfTraceFoundIdHook; 2894 private final Integer myOffset; 2895 private final IInterceptorBroadcaster myCompositeBroadcaster; 2896 private boolean myFirst = true; 2897 private IncludesIterator myIncludesIterator; 2898 /** 2899 * The next JpaPid value of the next result in this query. 2900 * Will not be null if fetched using getNext() 2901 */ 2902 private JpaPid myNext; 2903 /** 2904 * The current query result iterator running sql and supplying PIDs 2905 * @see #myQueryList 2906 */ 2907 private ISearchQueryExecutor myResultsIterator; 2908 2909 private boolean myFetchIncludesForEverythingOperation; 2910 2911 /** 2912 * The count of resources skipped because they were seen in earlier results 2913 */ 2914 private int mySkipCount = 0; 2915 /** 2916 * The count of resources that are new in this search 2917 * (ie, not cached in previous searches) 2918 */ 2919 private int myNonSkipCount = 0; 2920 /** 2921 * The list of queries to use to find all results. 2922 * Normal JPA queries will normally have a single entry. 
2923 * Queries that involve Hibernate Search/Elasticsearch may have 2924 * multiple queries because of chunking. 2925 * The $everything operation also jams some extra results in. 2926 */ 2927 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2928 2929 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2930 mySearchRuntimeDetails = theSearchRuntimeDetails; 2931 myOffset = myParams.getOffset(); 2932 myRequest = theRequest; 2933 myCompositeBroadcaster = 2934 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2935 2936 // everything requires fetching recursively all related resources 2937 if (myParams.getEverythingMode() != null) { 2938 myFetchIncludesForEverythingOperation = true; 2939 } 2940 2941 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2942 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2943 } 2944 2945 private void fetchNext() { 2946 try { 2947 if (myHaveRawSqlHooks) { 2948 CurrentThreadCaptureQueriesListener.startCapturing(); 2949 } 2950 2951 // If we don't have a query yet, create one 2952 if (myResultsIterator == null) { 2953 if (!mySearchProperties.hasMaxResultsRequested()) { 2954 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2955 } 2956 2957 /* 2958 * assigns the results iterator 2959 * and populates the myQueryList. 2960 */ 2961 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2962 } 2963 2964 if (myNext == null) { 2965 // no next means we need a new query (if one is available) 2966 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2967 /* 2968 * Because we combine our DB searches with Lucene 2969 * sometimes we can have multiple results iterators 2970 * (with only some having data in them to extract). 2971 * 2972 * We'll iterate our results iterators until we 2973 * either run out of results iterators, or we 2974 * have one that actually has data in it. 2975 */ 2976 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2977 retrieveNextIteratorQuery(); 2978 } 2979 2980 if (!myResultsIterator.hasNext()) { 2981 // we couldn't find a results iterator; 2982 // we're done here 2983 break; 2984 } 2985 2986 JpaPid nextPid = myResultsIterator.next(); 2987 if (myHavePerfTraceFoundIdHook) { 2988 callPerformanceTracingHook(nextPid); 2989 } 2990 2991 if (nextPid != null) { 2992 if (!myPidSet.contains(nextPid)) { 2993 if (!mySearchProperties.isDeduplicateInDatabase()) { 2994 /* 2995 * We only add to the map if we aren't fetching "everything"; 2996 * otherwise, we let the de-duplication happen in the database 2997 * (see createChunkedQueryNormalSearch above), because it 2998 * saves memory that way. 
		private void fetchNext() {
			try {
				if (myHaveRawSqlHooks) {
					CurrentThreadCaptureQueriesListener.startCapturing();
				}

				// If we don't have a query yet, create one
				if (myResultsIterator == null) {
					if (!mySearchProperties.hasMaxResultsRequested()) {
						mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
					}

					/*
					 * assigns the results iterator
					 * and populates the myQueryList.
					 */
					initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
				}

				if (myNext == null) {
					// no next means we need a new query (if one is available)
					while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
						/*
						 * Because we combine our DB searches with Lucene
						 * sometimes we can have multiple results iterators
						 * (with only some having data in them to extract).
						 *
						 * We'll iterate our results iterators until we
						 * either run out of results iterators, or we
						 * have one that actually has data in it.
						 */
						while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) {
							retrieveNextIteratorQuery();
						}

						if (!myResultsIterator.hasNext()) {
							// we couldn't find a results iterator;
							// we're done here
							break;
						}

						JpaPid nextPid = myResultsIterator.next();
						if (myHavePerfTraceFoundIdHook) {
							callPerformanceTracingHook(nextPid);
						}

						if (nextPid != null) {
							if (!myPidSet.contains(nextPid)) {
								if (!mySearchProperties.isDeduplicateInDatabase()) {
									/*
									 * We only add to the map if we aren't fetching "everything";
									 * otherwise, we let the de-duplication happen in the database
									 * (see createChunkedQueryNormalSearch above), because it
									 * saves memory that way.
									 */
									myPidSet.add(nextPid);
								}
								if (doNotSkipNextPidForEverything()) {
									myNext = nextPid;
									myNonSkipCount++;
									break;
								}
							} else {
								mySkipCount++;
							}
						}

						if (!myResultsIterator.hasNext()) {
							if (mySearchProperties.hasMaxResultsRequested()
									&& (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) {
								if (mySkipCount > 0 && myNonSkipCount == 0) {
									sendProcessingMsgAndFirePerformanceHook();
									// need the next iterator; increase the maxsize
									// (we should always do this)
									int maxResults = mySearchProperties.getMaxResultsRequested() + 1000;
									mySearchProperties.setMaxResultsRequested(maxResults);

									if (!mySearchProperties.isDeduplicateInDatabase()) {
										// if we're not using the database to deduplicate
										// we should recheck our memory usage
										// the prefetch size check is future proofing
										int prefetchSize = myStorageSettings
												.getSearchPreFetchThresholds()
												.size();
										if (prefetchSize > 0) {
											if (myStorageSettings
															.getSearchPreFetchThresholds()
															.get(prefetchSize - 1)
													< mySearchProperties.getMaxResultsRequested()) {
												mySearchProperties.setDeduplicateInDatabase(true);
											}
										}
									}

									initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
								}
							}
						}
					}
				}

				if (myNext == null) {
					// if we got here, it means the current JpaPid has already been processed,
					// and we will decide (here) if we need to fetch related resources recursively
					if (myFetchIncludesForEverythingOperation) {
						myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
						myFetchIncludesForEverythingOperation = false;
					}
					if (myIncludesIterator != null) {
						while (myIncludesIterator.hasNext()) {
							JpaPid next = myIncludesIterator.next();
							if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) {
								myNext = next;
								break;
							}
						}
						if (myNext == null) {
							myNext = NO_MORE;
						}
					} else {
						myNext = NO_MORE;
					}
				}

				if (!mySearchProperties.hasMaxResultsRequested()) {
					mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount);
				} else {
					mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
				}

			} finally {
				// search finished - fire hooks
				if (myHaveRawSqlHooks) {
					callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster);
				}
			}

			if (myFirst) {
				HookParams params = new HookParams()
						.add(RequestDetails.class, myRequest)
						.addIfMatchesType(ServletRequestDetails.class, myRequest)
						.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
				myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params);
				myFirst = false;
			}

			if (NO_MORE.equals(myNext)) {
				HookParams params = new HookParams()
						.add(RequestDetails.class, myRequest)
						.addIfMatchesType(ServletRequestDetails.class, myRequest)
						.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
				myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params);
			}
		}
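
		/**
		 * Determines how many results the initial query should request: the synchronous load limit if
		 * one was supplied; otherwise, when both an offset and a count are present, offset + count for
		 * an $everything operation (or just the count for a plain paged search); failing that, the
		 * default maximum fetch size from the storage settings.
		 */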
		private Integer calculateMaxResultsToFetch() {
			if (myParams.getLoadSynchronousUpTo() != null) {
				return myParams.getLoadSynchronousUpTo();
			} else if (myParams.getOffset() != null && myParams.getCount() != null) {
				return myParams.getEverythingMode() != null
						? myParams.getOffset() + myParams.getCount()
						: myParams.getCount();
			} else {
				return myStorageSettings.getFetchSizeDefaultMaximum();
			}
		}

		private boolean doNotSkipNextPidForEverything() {
			return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size()));
		}

		private void callPerformanceTracingHook(JpaPid theNextPid) {
			HookParams params = new HookParams()
					.add(Integer.class, System.identityHashCode(this))
					.add(Object.class, theNextPid);
			myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params);
		}

		private void sendProcessingMsgAndFirePerformanceHook() {
			String msg = "Pass completed with no matching results seeking rows "
					+ myPidSet.size() + "-" + mySkipCount
					+ ". This indicates an inefficient query! Retrying with new max count of "
					+ mySearchProperties.getMaxResultsRequested();
			firePerformanceWarning(myRequest, msg);
		}

		private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) {
			Integer offset = theOffset;
			if (myQueryList.isEmpty()) {
				// Capture times for Lucene/Elasticsearch queries as well
				mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

				// set the offset to 0 so that all resource ids are fetched, which guarantees
				// correct results for the $everything operation during paging
				if (myParams.getEverythingMode() != null) {
					offset = 0;
				}

				SearchQueryProperties properties = mySearchProperties.clone();
				properties
						.setOffset(offset)
						.setMaxResultsRequested(theMaxResultsToFetch)
						.setDoCountOnlyFlag(false)
						.setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null);
				myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
			}

			mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

			retrieveNextIteratorQuery();

			mySkipCount = 0;
			myNonSkipCount = 0;
		}

		private void retrieveNextIteratorQuery() {
			close();
			if (isNotEmpty(myQueryList)) {
				myResultsIterator = myQueryList.remove(0);
				myHasNextIteratorQuery = true;
			} else {
				myResultsIterator = SearchQueryExecutor.emptyExecutor();
				myHasNextIteratorQuery = false;
			}
		}

		@Override
		public boolean hasNext() {
			if (myNext == null) {
				fetchNext();
			}
			return !NO_MORE.equals(myNext);
		}

		@Override
		public JpaPid next() {
			fetchNext();
			JpaPid retVal = myNext;
			myNext = null;
			Validate.isTrue(!NO_MORE.equals(retVal), "No more elements");
			return retVal;
		}

		@Override
		public int getSkippedCount() {
			return mySkipCount;
		}

		@Override
		public int getNonSkippedCount() {
			return myNonSkipCount;
		}

		@Override
		public Collection<JpaPid> getNextResultBatch(long theBatchSize) {
			Collection<JpaPid> batch = new ArrayList<>();
			while (this.hasNext() && batch.size() < theBatchSize) {
				batch.add(this.next());
			}
			return batch;
		}

		@Override
		public void close() {
			if (myResultsIterator != null) {
				myResultsIterator.close();
			}
			myResultsIterator = null;
		}
	}
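
	// Illustrative only: a minimal sketch of how a caller might drain a QueryIterator in fixed-size
	// batches through the IResultIterator contract and close it when finished. The names
	// resultIterator and BATCH_SIZE are placeholders for this sketch, not identifiers used here:
	//
	//   try (IResultIterator<JpaPid> resultIterator = ...) {
	//       while (resultIterator.hasNext()) {
	//           Collection<JpaPid> chunk = resultIterator.getNextResultBatch(BATCH_SIZE);
	//           // hydrate the chunk into resources; getSkippedCount() feeds the paging arithmetic
	//       }
	//   }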

	private void firePerformanceInfo(RequestDetails theRequest, String theMessage) {
		// Only log at debug level since these messages aren't considered important enough
		// that we should be cluttering the system log, but they are important to the
		// specific query being executed so we'll log them at INFO level there
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}

	private static ScrollableResults<?> toScrollableResults(Query theQuery) {
		org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
		return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
	}
}
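
// Illustrative only: the performance messages fired above reach interceptors registered against the
// corresponding pointcuts. A minimal consumer might look like the sketch below; the class and method
// names are assumptions for the example and do not exist in this codebase:
//
//   public class SlowQueryLogger {
//       @Hook(Pointcut.JPA_PERFTRACE_WARNING)
//       public void onPerfWarning(RequestDetails theRequestDetails, StorageProcessingMessage theMessage) {
//           // e.g. record theMessage.getMessage() for the inefficient-query warning
//       }
//   }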