
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.ComboSearchParamType; 023import ca.uhn.fhir.context.FhirContext; 024import ca.uhn.fhir.context.FhirVersionEnum; 025import ca.uhn.fhir.context.RuntimeResourceDefinition; 026import ca.uhn.fhir.context.RuntimeSearchParam; 027import ca.uhn.fhir.i18n.Msg; 028import ca.uhn.fhir.interceptor.api.HookParams; 029import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 030import ca.uhn.fhir.interceptor.api.Pointcut; 031import ca.uhn.fhir.interceptor.model.RequestPartitionId; 032import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 033import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 034import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 035import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 036import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 037import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 038import ca.uhn.fhir.jpa.dao.BaseStorageDao; 039import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 040import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 041import ca.uhn.fhir.jpa.dao.IResultIterator; 042import ca.uhn.fhir.jpa.dao.ISearchBuilder; 043import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 044import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 045import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 046import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 047import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 048import ca.uhn.fhir.jpa.model.config.PartitionSettings; 049import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 050import ca.uhn.fhir.jpa.model.dao.JpaPid; 051import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 052import ca.uhn.fhir.jpa.model.entity.BaseTag; 053import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 054import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 055import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 056import ca.uhn.fhir.jpa.model.entity.ResourceLink; 057import ca.uhn.fhir.jpa.model.entity.ResourceTag; 058import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 059import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 060import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 061import ca.uhn.fhir.jpa.model.util.JpaConstants; 062import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 063import ca.uhn.fhir.jpa.search.SearchConstants; 064import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 065import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 066import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 067import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 069import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 070import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 071import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 072import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 073import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 074import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 075import ca.uhn.fhir.jpa.util.BaseIterator; 076import ca.uhn.fhir.jpa.util.CartesianProductUtil; 077import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 078import ca.uhn.fhir.jpa.util.QueryChunker; 079import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; 080import ca.uhn.fhir.jpa.util.SqlQueryList; 081import ca.uhn.fhir.model.api.IQueryParameterType; 082import ca.uhn.fhir.model.api.Include; 083import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 084import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 085import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 086import ca.uhn.fhir.rest.api.Constants; 087import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 088import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 089import ca.uhn.fhir.rest.api.SortOrderEnum; 090import ca.uhn.fhir.rest.api.SortSpec; 091import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 092import ca.uhn.fhir.rest.api.server.RequestDetails; 093import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 094import ca.uhn.fhir.rest.param.DateParam; 095import ca.uhn.fhir.rest.param.DateRangeParam; 096import ca.uhn.fhir.rest.param.ParamPrefixEnum; 097import ca.uhn.fhir.rest.param.ParameterUtil; 098import ca.uhn.fhir.rest.param.ReferenceParam; 099import ca.uhn.fhir.rest.param.StringParam; 100import ca.uhn.fhir.rest.param.TokenParam; 101import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 102import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 103import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 104import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 105import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 106import ca.uhn.fhir.system.HapiSystemProperties; 107import ca.uhn.fhir.util.SearchParameterUtil; 108import ca.uhn.fhir.util.StopWatch; 109import ca.uhn.fhir.util.StringUtil; 110import ca.uhn.fhir.util.UrlUtil; 111import com.google.common.annotations.VisibleForTesting; 112import com.google.common.collect.ListMultimap; 113import com.google.common.collect.Lists; 114import com.google.common.collect.MultimapBuilder; 115import com.healthmarketscience.sqlbuilder.Condition; 116import jakarta.annotation.Nonnull; 117import jakarta.annotation.Nullable; 118import jakarta.persistence.EntityManager; 119import jakarta.persistence.PersistenceContext; 120import jakarta.persistence.PersistenceContextType; 121import jakarta.persistence.Query; 122import jakarta.persistence.Tuple; 123import jakarta.persistence.TypedQuery; 124import jakarta.persistence.criteria.CriteriaBuilder; 125import jakarta.persistence.criteria.CriteriaQuery; 126import jakarta.persistence.criteria.Predicate; 127import jakarta.persistence.criteria.Root; 128import jakarta.persistence.criteria.Selection; 129import org.apache.commons.collections4.ListUtils; 130import org.apache.commons.lang3.StringUtils; 131import org.apache.commons.lang3.Validate; 132import org.apache.commons.lang3.math.NumberUtils; 133import org.apache.commons.lang3.tuple.Pair; 134import org.hibernate.ScrollMode; 135import org.hibernate.ScrollableResults; 136import org.hl7.fhir.instance.model.api.IAnyResource; 137import org.hl7.fhir.instance.model.api.IBaseResource; 138import org.hl7.fhir.instance.model.api.IIdType; 139import org.slf4j.Logger; 140import org.slf4j.LoggerFactory; 141import org.springframework.beans.factory.annotation.Autowired; 142import org.springframework.jdbc.core.JdbcTemplate; 143import org.springframework.transaction.support.TransactionSynchronizationManager; 144 145import java.util.ArrayList; 146import java.util.Arrays; 147import java.util.Collection; 148import java.util.Collections; 149import java.util.Comparator; 150import java.util.HashMap; 151import java.util.HashSet; 152import java.util.Iterator; 153import java.util.LinkedList; 154import java.util.List; 155import java.util.Map; 156import java.util.Objects; 157import java.util.Optional; 158import java.util.Set; 159import java.util.stream.Collectors; 160 161import static ca.uhn.fhir.jpa.model.util.JpaConstants.NO_MORE; 162import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 163import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 164import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 165import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 166import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 167import static java.util.Objects.requireNonNull; 168import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 169import static org.apache.commons.lang3.StringUtils.isBlank; 170import static org.apache.commons.lang3.StringUtils.isNotBlank; 171import static org.apache.commons.lang3.StringUtils.stripStart; 172 173/** 174 * The SearchBuilder is responsible for actually forming the SQL query that handles 175 * searches for resources 176 */ 177public class SearchBuilder implements ISearchBuilder<JpaPid> { 178 179 /** 180 * See loadResourcesByPid 181 * for an explanation of why we use the constant 800 182 */ 183 // NB: keep public 184 @Deprecated 185 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 186 187 public static final String RESOURCE_ID_ALIAS = "resource_id"; 188 public static final String PARTITION_ID_ALIAS = "partition_id"; 189 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 190 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 191 192 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 193 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 194 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 195 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 196 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 197 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 198 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 199 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 200 public static Integer myMaxPageSizeForTests = null; 201 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 202 protected final IResourceTagDao myResourceTagDao; 203 private String myResourceName; 204 private final Class<? extends IBaseResource> myResourceType; 205 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 206 private final SqlObjectFactory mySqlBuilderFactory; 207 private final HibernatePropertiesProvider myDialectProvider; 208 private final ISearchParamRegistry mySearchParamRegistry; 209 private final PartitionSettings myPartitionSettings; 210 private final DaoRegistry myDaoRegistry; 211 private final FhirContext myContext; 212 private final IIdHelperService<JpaPid> myIdHelperService; 213 private final JpaStorageSettings myStorageSettings; 214 private final SearchQueryProperties mySearchProperties; 215 private final IResourceHistoryTableDao myResourceHistoryTableDao; 216 private final IJpaStorageResourceParser myJpaStorageResourceParser; 217 218 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 219 protected EntityManager myEntityManager; 220 221 private CriteriaBuilder myCriteriaBuilder; 222 private SearchParameterMap myParams; 223 private String mySearchUuid; 224 private int myFetchSize; 225 226 private boolean myRequiresTotal; 227 228 /** 229 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 230 */ 231 private Set<JpaPid> myPidSet; 232 233 private boolean myHasNextIteratorQuery = false; 234 private RequestPartitionId myRequestPartitionId; 235 236 private IFulltextSearchSvc myFulltextSearchSvc; 237 238 @Autowired(required = false) 239 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 240 myFulltextSearchSvc = theFulltextSearchSvc; 241 } 242 243 @Autowired(required = false) 244 private IElasticsearchSvc myIElasticsearchSvc; 245 246 @Autowired 247 private IResourceHistoryTagDao myResourceHistoryTagDao; 248 249 @Autowired 250 private IRequestPartitionHelperSvc myPartitionHelperSvc; 251 252 /** 253 * Constructor 254 */ 255 @SuppressWarnings({"rawtypes", "unchecked"}) 256 public SearchBuilder( 257 String theResourceName, 258 JpaStorageSettings theStorageSettings, 259 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 260 SqlObjectFactory theSqlBuilderFactory, 261 HibernatePropertiesProvider theDialectProvider, 262 ISearchParamRegistry theSearchParamRegistry, 263 PartitionSettings thePartitionSettings, 264 IInterceptorBroadcaster theInterceptorBroadcaster, 265 IResourceTagDao theResourceTagDao, 266 DaoRegistry theDaoRegistry, 267 FhirContext theContext, 268 IIdHelperService theIdHelperService, 269 IResourceHistoryTableDao theResourceHistoryTagDao, 270 IJpaStorageResourceParser theIJpaStorageResourceParser, 271 Class<? extends IBaseResource> theResourceType) { 272 myResourceName = theResourceName; 273 myResourceType = theResourceType; 274 myStorageSettings = theStorageSettings; 275 276 myEntityManagerFactory = theEntityManagerFactory; 277 mySqlBuilderFactory = theSqlBuilderFactory; 278 myDialectProvider = theDialectProvider; 279 mySearchParamRegistry = theSearchParamRegistry; 280 myPartitionSettings = thePartitionSettings; 281 myInterceptorBroadcaster = theInterceptorBroadcaster; 282 myResourceTagDao = theResourceTagDao; 283 myDaoRegistry = theDaoRegistry; 284 myContext = theContext; 285 myIdHelperService = theIdHelperService; 286 myResourceHistoryTableDao = theResourceHistoryTagDao; 287 myJpaStorageResourceParser = theIJpaStorageResourceParser; 288 289 mySearchProperties = new SearchQueryProperties(); 290 } 291 292 @VisibleForTesting 293 void setResourceName(String theName) { 294 myResourceName = theName; 295 } 296 297 @Override 298 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 299 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 300 } 301 302 @Override 303 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 304 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 305 } 306 307 @Override 308 public void setRequireTotal(boolean theRequireTotal) { 309 myRequiresTotal = theRequireTotal; 310 } 311 312 @Override 313 public boolean requiresTotal() { 314 return myRequiresTotal; 315 } 316 317 private void searchForIdsWithAndOr( 318 SearchQueryBuilder theSearchSqlBuilder, 319 QueryStack theQueryStack, 320 @Nonnull SearchParameterMap theParams, 321 RequestDetails theRequest) { 322 myParams = theParams; 323 mySearchProperties.setSortSpec(myParams.getSort()); 324 325 // Remove any empty parameters 326 theParams.clean(); 327 328 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 329 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 330 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 331 } 332 333 // Attempt to lookup via composite unique key. 334 if (isComboSearchCandidate()) { 335 attemptComboSearchParameterProcessing(theQueryStack, theParams, theRequest); 336 } 337 338 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 339 List<String> paramNames = myParams.keySet().stream() 340 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 341 .filter(t -> !t.equals(Constants.PARAM_TAG)) 342 .collect(Collectors.toList()); 343 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 344 paramNames.add(IAnyResource.SP_RES_ID); 345 } 346 if (myParams.containsKey(Constants.PARAM_TAG)) { 347 paramNames.add(Constants.PARAM_TAG); 348 } 349 350 // Handle each parameter 351 for (String nextParamName : paramNames) { 352 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 353 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 354 // Elasticsearch 355 continue; 356 } 357 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 358 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 359 .setParamName(nextParamName) 360 .setAndOrParams(andOrParams) 361 .setRequest(theRequest) 362 .setRequestPartitionId(myRequestPartitionId) 363 .setIncludeDeleted(myParams.getSearchIncludeDeletedMode())); 364 if (predicate != null) { 365 theSearchSqlBuilder.addPredicate(predicate); 366 } 367 } 368 } 369 370 /** 371 * This method returns <code>true</code> if the search is potentially a candidate for 372 * processing using a Combo SearchParameter. This means that: 373 * <ul> 374 * <li>Combo SearchParamdeters are enabled</li> 375 * <li>It's not an $everything search</li> 376 * <li>We're searching on a specific resource type</li> 377 * </ul> 378 */ 379 private boolean isComboSearchCandidate() { 380 return myStorageSettings.isUniqueIndexesEnabled() 381 && myParams.getEverythingMode() == null 382 && myResourceName != null; 383 } 384 385 @SuppressWarnings("ConstantConditions") 386 @Override 387 public Long createCountQuery( 388 SearchParameterMap theParams, 389 String theSearchUuid, 390 RequestDetails theRequest, 391 @Nonnull RequestPartitionId theRequestPartitionId) { 392 393 assert theRequestPartitionId != null; 394 assert TransactionSynchronizationManager.isActualTransactionActive(); 395 396 init(theParams, theSearchUuid, theRequestPartitionId); 397 398 if (checkUseHibernateSearch()) { 399 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 400 } 401 402 SearchQueryProperties properties = mySearchProperties.clone(); 403 properties.setDoCountOnlyFlag(true); 404 properties.setSortSpec(null); // counts don't require sorts 405 properties.setMaxResultsRequested(null); 406 properties.setOffset(null); 407 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 408 if (queries.isEmpty()) { 409 return 0L; 410 } else { 411 JpaPid jpaPid = queries.get(0).next(); 412 return jpaPid.getId(); 413 } 414 } 415 416 /** 417 * @param thePidSet May be null 418 */ 419 @Override 420 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 421 myPidSet = new HashSet<>(thePidSet); 422 } 423 424 @SuppressWarnings("ConstantConditions") 425 @Override 426 public IResultIterator<JpaPid> createQuery( 427 SearchParameterMap theParams, 428 SearchRuntimeDetails theSearchRuntimeDetails, 429 RequestDetails theRequest, 430 @Nonnull RequestPartitionId theRequestPartitionId) { 431 assert theRequestPartitionId != null; 432 assert TransactionSynchronizationManager.isActualTransactionActive(); 433 434 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 435 436 if (myPidSet == null) { 437 myPidSet = new HashSet<>(); 438 } 439 440 return new QueryIterator(theSearchRuntimeDetails, theRequest); 441 } 442 443 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 444 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 445 // we mutate the params. Make a private copy. 446 myParams = theParams.clone(); 447 mySearchProperties.setSortSpec(myParams.getSort()); 448 mySearchUuid = theSearchUuid; 449 myRequestPartitionId = theRequestPartitionId; 450 } 451 452 /** 453 * The query created can be either a count query or the 454 * actual query. 455 * This is why it takes a SearchQueryProperties object 456 * (and doesn't use the local version of it). 457 * The properties may differ slightly for whichever 458 * query this is. 459 */ 460 private List<ISearchQueryExecutor> createQuery( 461 SearchParameterMap theParams, 462 SearchQueryProperties theSearchProperties, 463 RequestDetails theRequest, 464 SearchRuntimeDetails theSearchRuntimeDetails) { 465 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 466 467 if (checkUseHibernateSearch()) { 468 // we're going to run at least part of the search against the Fulltext service. 469 470 // Ugh - we have two different return types for now 471 ISearchQueryExecutor fulltextExecutor = null; 472 List<JpaPid> fulltextMatchIds = null; 473 int resultCount = 0; 474 if (myParams.isLastN()) { 475 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 476 resultCount = fulltextMatchIds.size(); 477 } else if (myParams.getEverythingMode() != null) { 478 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 479 resultCount = fulltextMatchIds.size(); 480 } else { 481 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 482 // enabled SP indexing). 483 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 484 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 485 // extra work in Elastic. 486 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 487 488 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 489 // a page. 490 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 491 } 492 493 if (fulltextExecutor == null) { 494 fulltextExecutor = 495 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 496 } 497 498 if (theSearchRuntimeDetails != null) { 499 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 500 IInterceptorBroadcaster compositeBroadcaster = 501 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 502 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 503 HookParams params = new HookParams() 504 .add(RequestDetails.class, theRequest) 505 .addIfMatchesType(ServletRequestDetails.class, theRequest) 506 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 507 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 508 } 509 } 510 511 // can we skip the database entirely and return the pid list from here? 512 boolean canSkipDatabase = 513 // if we processed an AND clause, and it returned nothing, then nothing can match. 514 !fulltextExecutor.hasNext() 515 || 516 // Our hibernate search query doesn't respect partitions yet 517 (!myPartitionSettings.isPartitioningEnabled() 518 && 519 // were there AND terms left? Then we still need the db. 520 theParams.isEmpty() 521 && 522 // not every param is a param. :-( 523 theParams.getNearDistanceParam() == null 524 && 525 // todo MB don't we support _lastUpdated and _offset now? 526 theParams.getLastUpdated() == null 527 && theParams.getEverythingMode() == null 528 && theParams.getOffset() == null); 529 530 if (canSkipDatabase) { 531 ourLog.trace("Query finished after HSearch. Skip db query phase"); 532 if (theSearchProperties.hasMaxResultsRequested()) { 533 fulltextExecutor = SearchQueryExecutors.limited( 534 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 535 } 536 queries.add(fulltextExecutor); 537 } else { 538 ourLog.trace("Query needs db after HSearch. Chunking."); 539 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 540 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 541 QueryChunker.chunk( 542 fulltextExecutor, 543 SearchBuilder.getMaximumPageSize(), 544 // for each list of (SearchBuilder.getMaximumPageSize()) 545 // we create a chunked query and add it to 'queries' 546 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 547 } 548 } else { 549 // do everything in the database. 550 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 551 } 552 553 return queries; 554 } 555 556 /** 557 * Check to see if query should use Hibernate Search, and error if the query can't continue. 558 * 559 * @return true if the query should first be processed by Hibernate Search 560 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 561 */ 562 private boolean checkUseHibernateSearch() { 563 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 564 565 if (!fulltextEnabled) { 566 failIfUsed(Constants.PARAM_TEXT); 567 failIfUsed(Constants.PARAM_CONTENT); 568 } else { 569 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 570 final String paramName = sortSpec.getParamName(); 571 if (paramName.contains(".")) { 572 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 573 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 574 } 575 } 576 } 577 578 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 579 // can. 580 return fulltextEnabled 581 && myParams != null 582 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 583 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 584 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 585 } 586 587 private void failIfUsed(String theParamName) { 588 if (myParams.containsKey(theParamName)) { 589 throw new InvalidRequestException(Msg.code(1192) 590 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 591 } 592 } 593 594 private void failIfUsedWithChainedSort(String theParamName) { 595 if (myParams.containsKey(theParamName)) { 596 throw new InvalidRequestException(Msg.code(2524) 597 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 598 + theParamName); 599 } 600 } 601 602 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 603 // Can we use our hibernate search generated index on resource to support lastN?: 604 if (myStorageSettings.isHibernateSearchIndexSearchParams()) { 605 if (myFulltextSearchSvc == null) { 606 throw new InvalidRequestException(Msg.code(2027) 607 + "LastN operation is not enabled on this service, can not process this request"); 608 } 609 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 610 .map(t -> (JpaPid) t) 611 .collect(Collectors.toList()); 612 } else { 613 throw new InvalidRequestException( 614 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 615 } 616 } 617 618 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 619 JpaPid pid = null; 620 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 621 String idParamValue; 622 IQueryParameterType idParam = 623 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 624 if (idParam instanceof TokenParam idParm) { 625 idParamValue = idParm.getValue(); 626 } else { 627 StringParam idParm = (StringParam) idParam; 628 idParamValue = idParm.getValue(); 629 } 630 631 pid = myIdHelperService 632 .resolveResourceIdentity( 633 myRequestPartitionId, 634 myResourceName, 635 idParamValue, 636 ResolveIdentityMode.includeDeleted().cacheOk()) 637 .getPersistentId(); 638 } 639 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 640 } 641 642 private void doCreateChunkedQueries( 643 SearchParameterMap theParams, 644 List<JpaPid> thePids, 645 SearchQueryProperties theSearchQueryProperties, 646 RequestDetails theRequest, 647 ArrayList<ISearchQueryExecutor> theQueries) { 648 649 if (thePids.size() < getMaximumPageSize()) { 650 thePids = normalizeIdListForInClause(thePids); 651 } 652 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 653 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 654 } 655 656 /** 657 * Combs through the params for any _id parameters and extracts the PIDs for them 658 */ 659 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 660 // get all the IQueryParameterType objects 661 // for _id -> these should all be StringParam values 662 HashSet<IIdType> ids = new HashSet<>(); 663 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 664 for (List<IQueryParameterType> paramList : params) { 665 for (IQueryParameterType param : paramList) { 666 String id; 667 if (param instanceof StringParam) { 668 // we expect all _id values to be StringParams 669 id = ((StringParam) param).getValue(); 670 } else if (param instanceof TokenParam) { 671 id = ((TokenParam) param).getValue(); 672 } else { 673 // we do not expect the _id parameter to be a non-string value 674 throw new IllegalArgumentException( 675 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 676 } 677 678 IIdType idType = myContext.getVersion().newIdType(); 679 if (id.contains("/")) { 680 idType.setValue(id); 681 } else { 682 idType.setValue(myResourceName + "/" + id); 683 } 684 ids.add(idType); 685 } 686 } 687 688 // fetch our target Pids 689 // this will throw if an id is not found 690 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 691 myRequestPartitionId, 692 new ArrayList<>(ids), 693 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 694 695 // add the pids to targetPids 696 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 697 theTargetPids.add(pid.getPersistentId()); 698 } 699 } 700 701 private void createChunkedQuery( 702 SearchParameterMap theParams, 703 SearchQueryProperties theSearchProperties, 704 RequestDetails theRequest, 705 List<JpaPid> thePidList, 706 List<ISearchQueryExecutor> theSearchQueryExecutors) { 707 if (myParams.getEverythingMode() != null) { 708 createChunkedQueryForEverythingSearch( 709 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 710 } else { 711 createChunkedQueryNormalSearch( 712 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 713 } 714 } 715 716 private void createChunkedQueryNormalSearch( 717 SearchParameterMap theParams, 718 SearchQueryProperties theSearchProperties, 719 RequestDetails theRequest, 720 List<JpaPid> thePidList, 721 List<ISearchQueryExecutor> theSearchQueryExecutors) { 722 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 723 myContext, 724 myStorageSettings, 725 myPartitionSettings, 726 myRequestPartitionId, 727 myResourceName, 728 mySqlBuilderFactory, 729 myDialectProvider, 730 theSearchProperties.isDoCountOnlyFlag(), 731 myResourceName == null || myResourceName.isBlank()); 732 QueryStack queryStack3 = new QueryStack( 733 theRequest, 734 theParams, 735 myStorageSettings, 736 myContext, 737 sqlBuilder, 738 mySearchParamRegistry, 739 myPartitionSettings); 740 741 if (theParams.keySet().size() > 1 742 || theParams.getSort() != null 743 || theParams.keySet().contains(Constants.PARAM_HAS) 744 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 745 List<RuntimeSearchParam> activeComboParams = List.of(); 746 if (myResourceName != null) { 747 activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 748 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 749 } 750 if (activeComboParams.isEmpty()) { 751 sqlBuilder.setNeedResourceTableRoot(true); 752 } 753 } 754 755 /* 756 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 757 * specific filters with ORs as their root from working around the natural resource type / deletion 758 * status / partition IDs built into queries. 759 */ 760 if (theParams.containsKey(Constants.PARAM_FILTER)) { 761 Condition partitionIdPredicate = sqlBuilder 762 .getOrCreateResourceTablePredicateBuilder() 763 .createPartitionIdPredicate(myRequestPartitionId); 764 if (partitionIdPredicate != null) { 765 sqlBuilder.addPredicate(partitionIdPredicate); 766 } 767 } 768 769 // Normal search 770 // we will create a resourceTablePredicate if and only if we have an _id SP. 771 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 772 773 // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the 774 // partition ID predicate in that case. 775 if (!sqlBuilder.haveAtLeastOnePredicate()) { 776 Condition partitionIdPredicate; 777 778 if (theParams.getSearchIncludeDeletedMode() != null) { 779 partitionIdPredicate = sqlBuilder 780 .getOrCreateResourceTablePredicateBuilder(true, theParams.getSearchIncludeDeletedMode()) 781 .createPartitionIdPredicate(myRequestPartitionId); 782 } else { 783 partitionIdPredicate = sqlBuilder 784 .getOrCreateResourceTablePredicateBuilder() 785 .createPartitionIdPredicate(myRequestPartitionId); 786 } 787 788 if (partitionIdPredicate != null) { 789 sqlBuilder.addPredicate(partitionIdPredicate); 790 } 791 } 792 793 // Add PID list predicate for full text search and/or lastn operation 794 addPidListPredicate(thePidList, sqlBuilder); 795 796 // Last updated 797 addLastUpdatePredicate(sqlBuilder); 798 799 /* 800 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 801 * to something needed to guarantee correct results. 802 * 803 * Why do we need it? Suppose for example, a query like: 804 * Observation?category=foo,bar,baz 805 * And suppose you have many resources that have all 3 of these category codes. In this case 806 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 807 * we may exhaust the query results without getting enough distinct results back. When that 808 * happens we re-run the query with a larger limit. Excluding results we already know about 809 * tries to ensure that we get new unique results. 810 * 811 * The challenge with that though is that lots of DBs have an issue with too many 812 * parameters in one query. So we only do this optimization if there aren't too 813 * many results. 814 */ 815 if (myHasNextIteratorQuery) { 816 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 817 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 818 } 819 } 820 821 /* 822 * If offset is present, we want to deduplicate the results by using GROUP BY; 823 * OR 824 * if the MaxResultsToFetch is null, we are requesting "everything", 825 * so we'll let the db do the deduplication (instead of in-memory) 826 */ 827 if (theSearchProperties.isDeduplicateInDatabase()) { 828 queryStack3.addGrouping(); 829 queryStack3.setUseAggregate(true); 830 } 831 832 /* 833 * Sort 834 * 835 * If we have a sort, we wrap the criteria search (the search that actually 836 * finds the appropriate resources) in an outer search which is then sorted 837 */ 838 if (theSearchProperties.hasSort()) { 839 assert !theSearchProperties.isDoCountOnlyFlag(); 840 841 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 842 } 843 844 /* 845 * Now perform the search 846 */ 847 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 848 } 849 850 private void executeSearch( 851 SearchQueryProperties theProperties, 852 List<ISearchQueryExecutor> theSearchQueryExecutors, 853 SearchQueryBuilder sqlBuilder) { 854 GeneratedSql generatedSql = 855 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 856 if (!generatedSql.isMatchNothing()) { 857 SearchQueryExecutor executor = 858 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 859 theSearchQueryExecutors.add(executor); 860 } 861 } 862 863 private void createChunkedQueryForEverythingSearch( 864 RequestDetails theRequest, 865 SearchParameterMap theParams, 866 SearchQueryProperties theSearchQueryProperties, 867 List<JpaPid> thePidList, 868 List<ISearchQueryExecutor> theSearchQueryExecutors) { 869 870 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 871 myContext, 872 myStorageSettings, 873 myPartitionSettings, 874 myRequestPartitionId, 875 null, 876 mySqlBuilderFactory, 877 myDialectProvider, 878 theSearchQueryProperties.isDoCountOnlyFlag(), 879 false); 880 881 QueryStack queryStack3 = new QueryStack( 882 theRequest, 883 theParams, 884 myStorageSettings, 885 myContext, 886 sqlBuilder, 887 mySearchParamRegistry, 888 myPartitionSettings); 889 890 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 891 892 Set<JpaPid> targetPids = new HashSet<>(); 893 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 894 895 extractTargetPidsFromIdParams(targetPids); 896 897 // add the target pids to our executors as the first 898 // results iterator to go through 899 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 900 } else { 901 // For Everything queries, we make the query root by the ResourceLink table, since this query 902 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 903 // the one problem with this approach is that it doesn't catch Patients that have absolutely 904 // nothing linked to them. So we do one additional query to make sure we catch those too. 905 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 906 myContext, 907 myStorageSettings, 908 myPartitionSettings, 909 myRequestPartitionId, 910 myResourceName, 911 mySqlBuilderFactory, 912 myDialectProvider, 913 theSearchQueryProperties.isDoCountOnlyFlag(), 914 false); 915 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 916 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 917 String sql = allTargetsSql.getSql(); 918 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 919 920 List<JpaPid> output = 921 jdbcTemplate.query(sql, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()), args); 922 923 // we add a search executor to fetch unlinked patients first 924 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 925 } 926 927 List<String> typeSourceResources = new ArrayList<>(); 928 if (myParams.get(Constants.PARAM_TYPE) != null) { 929 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 930 } 931 932 queryStack3.addPredicateEverythingOperation( 933 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 934 935 // Add PID list predicate for full text search and/or lastn operation 936 addPidListPredicate(thePidList, sqlBuilder); 937 938 /* 939 * If offset is present, we want deduplicate the results by using GROUP BY 940 * ORDER BY is required to make sure we return unique results for each page 941 */ 942 if (theSearchQueryProperties.hasOffset()) { 943 queryStack3.addGrouping(); 944 queryStack3.addOrdering(); 945 queryStack3.setUseAggregate(true); 946 } 947 948 if (myParams.getEverythingMode().isPatient()) { 949 /* 950 * NB: patient-compartment limitation 951 * 952 * We are manually excluding Group and List resources 953 * from the patient-compartment for $everything operations on Patient type/instance. 954 * 955 * See issue: https://github.com/hapifhir/hapi-fhir/issues/7118 956 */ 957 sqlBuilder.excludeResourceTypesPredicate( 958 SearchParameterUtil.RESOURCE_TYPES_TO_SP_TO_OMIT_FROM_PATIENT_COMPARTMENT.keySet()); 959 } 960 961 /* 962 * Now perform the search 963 */ 964 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 965 } 966 967 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 968 if (thePidList != null && !thePidList.isEmpty()) { 969 theSqlBuilder.addResourceIdsPredicate(thePidList); 970 } 971 } 972 973 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 974 DateRangeParam lu = myParams.getLastUpdated(); 975 if (lu != null && !lu.isEmpty()) { 976 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 977 theSqlBuilder.addPredicate(lastUpdatedPredicates); 978 } 979 } 980 981 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 982 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 983 jdbcTemplate.setFetchSize(myFetchSize); 984 if (theMaximumResults != null) { 985 jdbcTemplate.setMaxRows(theMaximumResults); 986 } 987 return jdbcTemplate; 988 } 989 990 private Collection<String> extractTypeSourceResourcesFromParams() { 991 992 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 993 994 // first off, let's flatten the list of list 995 List<IQueryParameterType> iQueryParameterTypesList = 996 listOfList.stream().flatMap(List::stream).toList(); 997 998 // then, extract all elements of each CSV into one big list 999 List<String> resourceTypes = iQueryParameterTypesList.stream() 1000 .map(param -> ((StringParam) param).getValue()) 1001 .map(csvString -> List.of(csvString.split(","))) 1002 .flatMap(List::stream) 1003 .toList(); 1004 1005 Set<String> knownResourceTypes = myContext.getResourceTypes(); 1006 1007 // remove leading/trailing whitespaces if any and remove duplicates 1008 Set<String> retVal = new HashSet<>(); 1009 1010 for (String type : resourceTypes) { 1011 String trimmed = type.trim(); 1012 if (!knownResourceTypes.contains(trimmed)) { 1013 throw new ResourceNotFoundException( 1014 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 1015 } 1016 retVal.add(trimmed); 1017 } 1018 1019 return retVal; 1020 } 1021 1022 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 1023 return myStorageSettings.isIndexOnContainedResources() 1024 && theParams.values().stream() 1025 .flatMap(Collection::stream) 1026 .flatMap(Collection::stream) 1027 .anyMatch(ReferenceParam.class::isInstance); 1028 } 1029 1030 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 1031 if (theSort == null || isBlank(theSort.getParamName())) { 1032 return; 1033 } 1034 1035 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 1036 1037 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 1038 1039 theQueryStack.addSortOnResourceId(ascending); 1040 1041 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 1042 1043 theQueryStack.addSortOnResourcePID(ascending); 1044 1045 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1046 1047 theQueryStack.addSortOnLastUpdated(ascending); 1048 1049 } else { 1050 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1051 myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1052 1053 /* 1054 * If we have a sort like _sort=subject.name and we have an 1055 * uplifted refchain for that combination we can do it more efficiently 1056 * by using the index associated with the uplifted refchain. In this case, 1057 * we need to find the actual target search parameter (corresponding 1058 * to "name" in this example) so that we know what datatype it is. 1059 */ 1060 String paramName = theSort.getParamName(); 1061 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1062 String[] chains = StringUtils.split(paramName, '.'); 1063 if (chains.length == 2) { 1064 1065 // Given: Encounter?_sort=Patient:subject.name 1066 String referenceParam = chains[0]; // subject 1067 String referenceParamTargetType = null; // Patient 1068 String targetParam = chains[1]; // name 1069 1070 int colonIdx = referenceParam.indexOf(':'); 1071 if (colonIdx > -1) { 1072 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1073 referenceParam = referenceParam.substring(colonIdx + 1); 1074 } 1075 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1076 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1077 if (outerParam == null) { 1078 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1079 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1080 for (String nextTargetType : outerParam.getTargets()) { 1081 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1082 continue; 1083 } 1084 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1085 nextTargetType, 1086 targetParam, 1087 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1088 if (innerParam != null) { 1089 param = innerParam; 1090 break; 1091 } 1092 } 1093 } 1094 } 1095 } 1096 1097 int colonIdx = paramName.indexOf(':'); 1098 String referenceTargetType = null; 1099 if (colonIdx > -1) { 1100 referenceTargetType = paramName.substring(0, colonIdx); 1101 paramName = paramName.substring(colonIdx + 1); 1102 } 1103 1104 int dotIdx = paramName.indexOf('.'); 1105 String chainName = null; 1106 if (param == null && dotIdx > -1) { 1107 chainName = paramName.substring(dotIdx + 1); 1108 paramName = paramName.substring(0, dotIdx); 1109 if (chainName.contains(".")) { 1110 String msg = myContext 1111 .getLocalizer() 1112 .getMessageSanitized( 1113 BaseStorageDao.class, 1114 "invalidSortParameterTooManyChains", 1115 paramName + "." + chainName); 1116 throw new InvalidRequestException(Msg.code(2286) + msg); 1117 } 1118 } 1119 1120 if (param == null) { 1121 param = mySearchParamRegistry.getActiveSearchParam( 1122 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1123 } 1124 1125 if (param == null) { 1126 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1127 } 1128 1129 // param will never be null here (the above line throws if it does) 1130 // this is just to prevent the warning 1131 assert param != null; 1132 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1133 throw new InvalidRequestException( 1134 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1135 } 1136 1137 switch (param.getParamType()) { 1138 case STRING: 1139 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1140 break; 1141 case DATE: 1142 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1143 break; 1144 case REFERENCE: 1145 theQueryStack.addSortOnResourceLink( 1146 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1147 break; 1148 case TOKEN: 1149 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1150 break; 1151 case NUMBER: 1152 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1153 break; 1154 case URI: 1155 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1156 break; 1157 case QUANTITY: 1158 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1159 break; 1160 case COMPOSITE: 1161 List<JpaParamUtil.ComponentAndCorrespondingParam> compositeList = 1162 JpaParamUtil.resolveCompositeComponents(mySearchParamRegistry, param); 1163 if (compositeList == null) { 1164 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1165 + " is not defined by the resource " + myResourceName); 1166 } 1167 if (compositeList.size() != 2) { 1168 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1169 + " must have 2 composite types declared in parameter annotation, found " 1170 + compositeList.size()); 1171 } 1172 RuntimeSearchParam left = compositeList.get(0).getComponentParameter(); 1173 RuntimeSearchParam right = compositeList.get(1).getComponentParameter(); 1174 1175 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1176 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1177 1178 break; 1179 case SPECIAL: 1180 if (LOCATION_POSITION.equals(param.getPath())) { 1181 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1182 break; 1183 } 1184 throw new InvalidRequestException( 1185 Msg.code(2306) + "This server does not support _sort specifications of type " 1186 + param.getParamType() + " - Can't serve _sort=" + paramName); 1187 1188 case HAS: 1189 default: 1190 throw new InvalidRequestException( 1191 Msg.code(1197) + "This server does not support _sort specifications of type " 1192 + param.getParamType() + " - Can't serve _sort=" + paramName); 1193 } 1194 } 1195 1196 // Recurse 1197 createSort(theQueryStack, theSort.getChain(), theParams); 1198 } 1199 1200 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1201 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1202 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1203 String msg = myContext 1204 .getLocalizer() 1205 .getMessageSanitized( 1206 BaseStorageDao.class, 1207 "invalidSortParameter", 1208 theParamName, 1209 theResourceName, 1210 validSearchParameterNames); 1211 throw new InvalidRequestException(Msg.code(1194) + msg); 1212 } 1213 1214 private void createCompositeSort( 1215 QueryStack theQueryStack, 1216 RestSearchParameterTypeEnum theParamType, 1217 String theParamName, 1218 boolean theAscending) { 1219 1220 switch (theParamType) { 1221 case STRING: 1222 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1223 break; 1224 case DATE: 1225 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1226 break; 1227 case TOKEN: 1228 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1229 break; 1230 case QUANTITY: 1231 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1232 break; 1233 case NUMBER: 1234 case REFERENCE: 1235 case COMPOSITE: 1236 case URI: 1237 case HAS: 1238 case SPECIAL: 1239 default: 1240 throw new InvalidRequestException( 1241 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1242 + " on _sort=" + theParamName); 1243 } 1244 } 1245 1246 private void doLoadPids( 1247 RequestDetails theRequest, 1248 Collection<JpaPid> thePids, 1249 Collection<JpaPid> theIncludedPids, 1250 List<IBaseResource> theResourceListToPopulate, 1251 boolean theForHistoryOperation, 1252 Map<Long, Integer> thePosition) { 1253 1254 Map<JpaPid, Long> resourcePidToVersion = null; 1255 for (JpaPid next : thePids) { 1256 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1257 if (resourcePidToVersion == null) { 1258 resourcePidToVersion = new HashMap<>(); 1259 } 1260 resourcePidToVersion.put(next, next.getVersion()); 1261 } 1262 } 1263 1264 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1265 int expectedCount = versionlessPids.size(); 1266 if (versionlessPids.size() < getMaximumPageSize()) { 1267 /* 1268 * This method adds a bunch of extra params to the end of the parameter list 1269 * which are for a resource PID that will never exist (-1 / NO_MORE). We do this 1270 * so that the database can rely on a cached execution plan since we're not 1271 * generating a new SQL query for every possible number of resources. 1272 */ 1273 versionlessPids = normalizeIdListForInClause(versionlessPids); 1274 } 1275 1276 // Load the resource bodies 1277 List<ResourceHistoryTable> resourceSearchViewList = 1278 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(versionlessPids); 1279 1280 /* 1281 * If we have specific versions to load, replace the history entries with the 1282 * correct ones 1283 * 1284 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1285 * version entity first, and by batching the fetches. But this is a fairly infrequently 1286 * used feature, and loading history entities by PK is a very efficient query so it's 1287 * not the end of the world 1288 */ 1289 if (resourcePidToVersion != null) { 1290 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1291 ResourceHistoryTable next = resourceSearchViewList.get(i); 1292 JpaPid resourceId = next.getPersistentId(); 1293 Long version = resourcePidToVersion.get(resourceId); 1294 resourceId.setVersion(version); 1295 if (version != null && !version.equals(next.getVersion())) { 1296 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1297 next.getResourceId().toFk(), version); 1298 resourceSearchViewList.set(i, replacement); 1299 } 1300 } 1301 } 1302 1303 /* 1304 * If we got fewer rows back than we expected, that means that one or more ResourceTable 1305 * entities (HFJ_RESOURCE) have a RES_VER version which doesn't exist in the 1306 * ResourceHistoryTable (HFJ_RES_VER) table. This should never happen under normal 1307 * operation, but if someone manually deletes a row or otherwise ends up in a weird 1308 * state it can happen. In that case, we do a manual process of figuring out what 1309 * is the right version. 1310 */ 1311 if (resourceSearchViewList.size() != expectedCount) { 1312 1313 Set<JpaPid> loadedPks = resourceSearchViewList.stream() 1314 .map(ResourceHistoryTable::getResourceId) 1315 .collect(Collectors.toSet()); 1316 for (JpaPid nextWantedPid : versionlessPids) { 1317 if (!nextWantedPid.equals(NO_MORE) && !loadedPks.contains(nextWantedPid)) { 1318 Optional<ResourceHistoryTable> latestVersion = findLatestVersion( 1319 theRequest, nextWantedPid, myResourceHistoryTableDao, myInterceptorBroadcaster); 1320 latestVersion.ifPresent(resourceSearchViewList::add); 1321 } 1322 } 1323 } 1324 1325 // -- preload all tags with tag definition if any 1326 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1327 1328 for (ResourceHistoryTable next : resourceSearchViewList) { 1329 if (next.getDeleted() != null) { 1330 continue; 1331 } 1332 1333 Class<? extends IBaseResource> resourceType = 1334 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1335 1336 JpaPid resourceId = next.getPersistentId(); 1337 1338 if (resourcePidToVersion != null) { 1339 Long version = resourcePidToVersion.get(resourceId); 1340 resourceId.setVersion(version); 1341 } 1342 1343 IBaseResource resource; 1344 resource = myJpaStorageResourceParser.toResource( 1345 theRequest, resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1346 if (resource == null) { 1347 ourLog.warn( 1348 "Unable to find resource {}/{}/_history/{} in database", 1349 next.getResourceType(), 1350 next.getIdDt().getIdPart(), 1351 next.getVersion()); 1352 continue; 1353 } 1354 1355 Integer index = thePosition.get(resourceId.getId()); 1356 if (index == null) { 1357 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1358 continue; 1359 } 1360 1361 if (theIncludedPids.contains(resourceId)) { 1362 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1363 } else { 1364 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1365 } 1366 1367 // ensure there's enough space; "<=" because of 0-indexing 1368 while (theResourceListToPopulate.size() <= index) { 1369 theResourceListToPopulate.add(null); 1370 } 1371 theResourceListToPopulate.set(index, resource); 1372 } 1373 } 1374 1375 @SuppressWarnings("OptionalIsPresent") 1376 @Nonnull 1377 public static Optional<ResourceHistoryTable> findLatestVersion( 1378 RequestDetails theRequest, 1379 JpaPid nextWantedPid, 1380 IResourceHistoryTableDao resourceHistoryTableDao, 1381 IInterceptorBroadcaster interceptorBroadcaster1) { 1382 assert nextWantedPid != null && !nextWantedPid.equals(NO_MORE); 1383 1384 Optional<ResourceHistoryTable> latestVersion = resourceHistoryTableDao 1385 .findVersionsForResource(JpaConstants.SINGLE_RESULT, nextWantedPid.toFk()) 1386 .findFirst(); 1387 String warning; 1388 if (latestVersion.isPresent()) { 1389 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1390 + " specifies an unknown current version, returning version " 1391 + latestVersion.get().getVersion() 1392 + " instead. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data."; 1393 } else { 1394 warning = "Database resource entry (HFJ_RESOURCE) with PID " + nextWantedPid 1395 + " specifies an unknown current version, and no versions of this resource exist. This invalid entry has a negative impact on performance; consider performing an appropriate $reindex to correct your data."; 1396 } 1397 1398 IInterceptorBroadcaster interceptorBroadcaster = 1399 CompositeInterceptorBroadcaster.newCompositeBroadcaster(interceptorBroadcaster1, theRequest); 1400 logAndBoradcastWarning(theRequest, warning, interceptorBroadcaster); 1401 return latestVersion; 1402 } 1403 1404 private static void logAndBoradcastWarning( 1405 RequestDetails theRequest, String warning, IInterceptorBroadcaster interceptorBroadcaster) { 1406 ourLog.warn(warning); 1407 1408 if (interceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING)) { 1409 HookParams params = new HookParams(); 1410 params.add(RequestDetails.class, theRequest); 1411 params.addIfMatchesType(ServletRequestDetails.class, theRequest); 1412 params.add(StorageProcessingMessage.class, new StorageProcessingMessage().setMessage(warning)); 1413 interceptorBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_WARNING, params); 1414 } 1415 } 1416 1417 private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) { 1418 return switch (myStorageSettings.getTagStorageMode()) { 1419 case VERSIONED -> getPidToTagMapVersioned(theHistoryTables); 1420 case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables); 1421 case INLINE -> Map.of(); 1422 }; 1423 } 1424 1425 @Nonnull 1426 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned( 1427 Collection<ResourceHistoryTable> theHistoryTables) { 1428 List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size()); 1429 1430 // -- find all resource has tags 1431 for (ResourceHistoryTable resource : theHistoryTables) { 1432 if (resource.isHasTags()) { 1433 idList.add(resource.getId()); 1434 } 1435 } 1436 1437 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1438 1439 // -- no tags 1440 if (idList.isEmpty()) { 1441 return tagMap; 1442 } 1443 1444 // -- get all tags for the idList 1445 Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList); 1446 1447 // -- build the map, key = resourceId, value = list of ResourceTag 1448 JpaPid resourceId; 1449 Collection<BaseTag> tagCol; 1450 for (ResourceHistoryTag tag : tagList) { 1451 1452 resourceId = tag.getResourcePid(); 1453 tagCol = tagMap.get(resourceId); 1454 if (tagCol == null) { 1455 tagCol = new ArrayList<>(); 1456 tagCol.add(tag); 1457 tagMap.put(resourceId, tagCol); 1458 } else { 1459 tagCol.add(tag); 1460 } 1461 } 1462 1463 return tagMap; 1464 } 1465 1466 @Nonnull 1467 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned( 1468 Collection<ResourceHistoryTable> theHistoryTables) { 1469 List<JpaPid> idList = new ArrayList<>(theHistoryTables.size()); 1470 1471 // -- find all resource has tags 1472 for (ResourceHistoryTable resource : theHistoryTables) { 1473 if (resource.isHasTags()) { 1474 idList.add(resource.getResourceId()); 1475 } 1476 } 1477 1478 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1479 1480 // -- no tags 1481 if (idList.isEmpty()) { 1482 return tagMap; 1483 } 1484 1485 // -- get all tags for the idList 1486 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList); 1487 1488 // -- build the map, key = resourceId, value = list of ResourceTag 1489 JpaPid resourceId; 1490 Collection<BaseTag> tagCol; 1491 for (ResourceTag tag : tagList) { 1492 1493 resourceId = tag.getResourceId(); 1494 tagCol = tagMap.get(resourceId); 1495 if (tagCol == null) { 1496 tagCol = new ArrayList<>(); 1497 tagCol.add(tag); 1498 tagMap.put(resourceId, tagCol); 1499 } else { 1500 tagCol.add(tag); 1501 } 1502 } 1503 1504 return tagMap; 1505 } 1506 1507 @Override 1508 public void loadResourcesByPid( 1509 Collection<JpaPid> thePids, 1510 Collection<JpaPid> theIncludedPids, 1511 List<IBaseResource> theResourceListToPopulate, 1512 boolean theForHistoryOperation, 1513 RequestDetails theRequestDetails) { 1514 if (thePids.isEmpty()) { 1515 ourLog.debug("The include pids are empty"); 1516 } 1517 1518 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1519 // when running asserts 1520 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1521 1522 Map<Long, Integer> position = new HashMap<>(); 1523 int index = 0; 1524 for (JpaPid next : thePids) { 1525 position.put(next.getId(), index++); 1526 } 1527 1528 // Can we fast track this loading by checking elastic search? 1529 boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids); 1530 if (isUsingElasticSearch) { 1531 try { 1532 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1533 return; 1534 1535 } catch (ResourceNotFoundInIndexException theE) { 1536 // some resources were not found in index, so we will inform this and resort to JPA search 1537 ourLog.warn( 1538 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1539 } 1540 } 1541 1542 // We only chunk because some jdbc drivers can't handle long param lists. 1543 QueryChunker.chunk( 1544 thePids, 1545 t -> doLoadPids( 1546 theRequestDetails, 1547 t, 1548 theIncludedPids, 1549 theResourceListToPopulate, 1550 theForHistoryOperation, 1551 position)); 1552 } 1553 1554 /** 1555 * Check if we can load the resources from Hibernate Search instead of the database. 1556 * We assume this is faster. 1557 * <p> 1558 * Hibernate Search only stores the current version, and only if enabled. 1559 * 1560 * @param thePids the pids to check for versioned references 1561 * @return can we fetch from Hibernate Search? 1562 */ 1563 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1564 // is storage enabled? 1565 return myStorageSettings.isStoreResourceInHSearchIndex() 1566 && myStorageSettings.isHibernateSearchIndexSearchParams() 1567 && 1568 // we don't support history 1569 thePids.stream().noneMatch(p -> p.getVersion() != null) 1570 && 1571 // skip the complexity for metadata in dstu2 1572 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1573 } 1574 1575 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1576 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1577 // only impl 1578 // to handle lastN? 1579 if (myStorageSettings.isHibernateSearchIndexSearchParams() 1580 && myStorageSettings.isStoreResourceInHSearchIndex()) { 1581 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1582 1583 return myFulltextSearchSvc.getResources(pidList); 1584 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1585 // legacy LastN implementation 1586 return myIElasticsearchSvc.getObservationResources(thePids); 1587 } else { 1588 return Collections.emptyList(); 1589 } 1590 } 1591 1592 /** 1593 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1594 * so it can't be Collections.emptySet() or some such thing. 1595 * The JpaPid returned will have resource type populated. 1596 */ 1597 @Override 1598 public Set<JpaPid> loadIncludes( 1599 FhirContext theContext, 1600 EntityManager theEntityManager, 1601 Collection<JpaPid> theMatches, 1602 Collection<Include> theIncludes, 1603 boolean theReverseMode, 1604 DateRangeParam theLastUpdated, 1605 String theSearchIdOrDescription, 1606 RequestDetails theRequest, 1607 Integer theMaxCount) { 1608 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1609 parameters.setFhirContext(theContext); 1610 parameters.setEntityManager(theEntityManager); 1611 parameters.setMatches(theMatches); 1612 parameters.setIncludeFilters(theIncludes); 1613 parameters.setReverseMode(theReverseMode); 1614 parameters.setLastUpdated(theLastUpdated); 1615 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1616 parameters.setRequestDetails(theRequest); 1617 parameters.setMaxCount(theMaxCount); 1618 return loadIncludes(parameters); 1619 } 1620 1621 @Override 1622 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1623 Collection<JpaPid> matches = theParameters.getMatches(); 1624 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1625 boolean reverseMode = theParameters.isReverseMode(); 1626 EntityManager entityManager = theParameters.getEntityManager(); 1627 Integer maxCount = theParameters.getMaxCount(); 1628 FhirContext fhirContext = theParameters.getFhirContext(); 1629 RequestDetails request = theParameters.getRequestDetails(); 1630 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1631 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1632 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1633 IInterceptorBroadcaster compositeBroadcaster = 1634 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1635 1636 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1637 CurrentThreadCaptureQueriesListener.startCapturing(); 1638 } 1639 if (matches.isEmpty()) { 1640 return new HashSet<>(); 1641 } 1642 if (currentIncludes == null || currentIncludes.isEmpty()) { 1643 return new HashSet<>(); 1644 } 1645 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1646 String searchPartitionIdFieldName = 1647 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1648 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1649 String findPartitionIdFieldName = 1650 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1651 String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1652 String findVersionFieldName = null; 1653 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1654 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1655 } 1656 1657 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1658 HashSet<JpaPid> allAdded = new HashSet<>(); 1659 HashSet<JpaPid> original = new HashSet<>(matches); 1660 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1661 1662 int roundCounts = 0; 1663 StopWatch w = new StopWatch(); 1664 1665 boolean addedSomeThisRound; 1666 do { 1667 roundCounts++; 1668 1669 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1670 1671 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1672 Include nextInclude = iter.next(); 1673 if (!nextInclude.isRecurse()) { 1674 iter.remove(); 1675 } 1676 1677 // Account for _include=* 1678 boolean matchAll = "*".equals(nextInclude.getValue()); 1679 1680 // Account for _include=[resourceType]:* 1681 String wantResourceType = null; 1682 if (!matchAll) { 1683 if ("*".equals(nextInclude.getParamName())) { 1684 wantResourceType = nextInclude.getParamType(); 1685 matchAll = true; 1686 } 1687 } 1688 1689 if (matchAll) { 1690 loadIncludesMatchAll( 1691 findPidFieldName, 1692 findPartitionIdFieldName, 1693 findResourceTypeFieldName, 1694 findVersionFieldName, 1695 searchPidFieldName, 1696 searchPartitionIdFieldName, 1697 wantResourceType, 1698 reverseMode, 1699 hasDesiredResourceTypes, 1700 nextRoundMatches, 1701 entityManager, 1702 maxCount, 1703 desiredResourceTypes, 1704 pidsToInclude, 1705 request); 1706 } else { 1707 loadIncludesMatchSpecific( 1708 nextInclude, 1709 fhirContext, 1710 findPidFieldName, 1711 findPartitionIdFieldName, 1712 findVersionFieldName, 1713 searchPidFieldName, 1714 reverseMode, 1715 nextRoundMatches, 1716 entityManager, 1717 maxCount, 1718 pidsToInclude, 1719 request); 1720 } 1721 } 1722 1723 nextRoundMatches.clear(); 1724 for (JpaPid next : pidsToInclude) { 1725 if (!original.contains(next) && !allAdded.contains(next)) { 1726 nextRoundMatches.add(next); 1727 } else { 1728 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1729 } 1730 } 1731 1732 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1733 1734 if (maxCount != null && allAdded.size() >= maxCount) { 1735 break; 1736 } 1737 1738 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1739 1740 allAdded.removeAll(original); 1741 1742 ourLog.info( 1743 "Loaded {} {} in {} rounds and {} ms for search {}", 1744 allAdded.size(), 1745 reverseMode ? "_revincludes" : "_includes", 1746 roundCounts, 1747 w.getMillisAndRestart(), 1748 searchIdOrDescription); 1749 1750 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1751 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1752 } 1753 1754 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1755 // This can be used to remove results from the search result details before 1756 // the user has a chance to know that they were in the results 1757 if (!allAdded.isEmpty()) { 1758 1759 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1760 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1761 JpaPreResourceAccessDetails accessDetails = 1762 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1763 HookParams params = new HookParams() 1764 .add(IPreResourceAccessDetails.class, accessDetails) 1765 .add(RequestDetails.class, request) 1766 .addIfMatchesType(ServletRequestDetails.class, request); 1767 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1768 1769 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1770 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1771 JpaPid value = includedPidList.remove(i); 1772 if (value != null) { 1773 allAdded.remove(value); 1774 } 1775 } 1776 } 1777 } 1778 } 1779 1780 return allAdded; 1781 } 1782 1783 private void loadIncludesMatchSpecific( 1784 Include nextInclude, 1785 FhirContext fhirContext, 1786 String findPidFieldName, 1787 String findPartitionFieldName, 1788 String findVersionFieldName, 1789 String searchPidFieldName, 1790 boolean reverseMode, 1791 List<JpaPid> nextRoundMatches, 1792 EntityManager entityManager, 1793 Integer maxCount, 1794 HashSet<JpaPid> pidsToInclude, 1795 RequestDetails theRequest) { 1796 List<String> paths; 1797 1798 // Start replace 1799 RuntimeSearchParam param; 1800 String resType = nextInclude.getParamType(); 1801 if (isBlank(resType)) { 1802 return; 1803 } 1804 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1805 if (def == null) { 1806 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1807 return; 1808 } 1809 1810 String paramName = nextInclude.getParamName(); 1811 if (isNotBlank(paramName)) { 1812 param = mySearchParamRegistry.getActiveSearchParam( 1813 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1814 } else { 1815 param = null; 1816 } 1817 if (param == null) { 1818 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1819 return; 1820 } 1821 1822 paths = param.getPathsSplitForResourceType(resType); 1823 // end replace 1824 1825 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1826 1827 for (String nextPath : paths) { 1828 String findPidFieldSqlColumn = 1829 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1830 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1831 if (findVersionFieldName != null) { 1832 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1833 } 1834 if (myPartitionSettings.isDatabasePartitionMode()) { 1835 fieldsToLoad += ", r."; 1836 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1837 ? "partition_id" 1838 : "target_res_partition_id"; 1839 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1840 } 1841 1842 // Query for includes lookup has 2 cases 1843 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1844 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1845 // url in target_resource_url 1846 1847 // Case 1: 1848 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1849 1850 String searchPidFieldSqlColumn = 1851 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1852 StringBuilder localReferenceQuery = new StringBuilder(); 1853 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1854 localReferenceQuery.append(" FROM hfj_res_link r "); 1855 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1856 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1857 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1858 } 1859 localReferenceQuery 1860 .append(" AND r.") 1861 .append(searchPidFieldSqlColumn) 1862 .append(" IN (:target_pids) "); 1863 if (myPartitionSettings.isDatabasePartitionMode()) { 1864 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1865 ? "target_res_partition_id" 1866 : "partition_id"; 1867 localReferenceQuery 1868 .append("AND r.") 1869 .append(partitionFieldToSearch) 1870 .append(" = :search_partition_id "); 1871 } 1872 localReferenceQueryParams.put("src_path", nextPath); 1873 // we loop over target_pids later. 1874 if (targetResourceTypes != null) { 1875 if (targetResourceTypes.size() == 1) { 1876 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1877 localReferenceQueryParams.put( 1878 "target_resource_type", 1879 targetResourceTypes.iterator().next()); 1880 } else { 1881 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1882 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1883 } 1884 } 1885 1886 // Case 2: 1887 Pair<String, Map<String, Object>> canonicalQuery = 1888 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1889 1890 String sql = localReferenceQuery.toString(); 1891 if (canonicalQuery != null) { 1892 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1893 } 1894 1895 Map<String, Object> limitParams = new HashMap<>(); 1896 if (maxCount != null) { 1897 LinkedList<Object> bindVariables = new LinkedList<>(); 1898 sql = SearchQueryBuilder.applyLimitToSql( 1899 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1900 1901 // The dialect SQL limiter uses positional params, but we're using 1902 // named params here, so we need to replace the positional params 1903 // with equivalent named ones 1904 StringBuilder sb = new StringBuilder(); 1905 for (int i = 0; i < sql.length(); i++) { 1906 char nextChar = sql.charAt(i); 1907 if (nextChar == '?') { 1908 String nextName = "limit" + i; 1909 sb.append(':').append(nextName); 1910 limitParams.put(nextName, bindVariables.removeFirst()); 1911 } else { 1912 sb.append(nextChar); 1913 } 1914 } 1915 sql = sb.toString(); 1916 } 1917 1918 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1919 for (Collection<JpaPid> nextPartition : partitions) { 1920 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1921 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1922 if (myPartitionSettings.isDatabasePartitionMode()) { 1923 q.setParameter( 1924 "search_partition_id", 1925 nextPartition.iterator().next().getPartitionId()); 1926 } 1927 localReferenceQueryParams.forEach(q::setParameter); 1928 if (canonicalQuery != null) { 1929 canonicalQuery.getRight().forEach(q::setParameter); 1930 } 1931 limitParams.forEach(q::setParameter); 1932 1933 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1934 Tuple result; 1935 while (iter.hasNext()) { 1936 result = iter.next(); 1937 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1938 Long resourceVersion = null; 1939 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1940 resourceVersion = 1941 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1942 } 1943 Integer partitionId = null; 1944 if (myPartitionSettings.isDatabasePartitionMode()) { 1945 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1946 } 1947 1948 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1949 pid.setPartitionId(partitionId); 1950 pidsToInclude.add(pid); 1951 } 1952 } 1953 // myEntityManager.clear(); 1954 } 1955 } 1956 } 1957 1958 private void loadIncludesMatchAll( 1959 String findPidFieldName, 1960 String findPartitionFieldName, 1961 String findResourceTypeFieldName, 1962 String findVersionFieldName, 1963 String searchPidFieldName, 1964 String searchPartitionFieldName, 1965 String wantResourceType, 1966 boolean reverseMode, 1967 boolean hasDesiredResourceTypes, 1968 List<JpaPid> nextRoundMatches, 1969 EntityManager entityManager, 1970 Integer maxCount, 1971 List<String> desiredResourceTypes, 1972 HashSet<JpaPid> pidsToInclude, 1973 RequestDetails request) { 1974 1975 record IncludesRecord( 1976 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1977 1978 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1979 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1980 Root<ResourceLink> root = query.from(ResourceLink.class); 1981 1982 List<Selection<?>> selectionList = new ArrayList<>(); 1983 selectionList.add(root.get(findPidFieldName)); 1984 selectionList.add(root.get(findResourceTypeFieldName)); 1985 selectionList.add(root.get("myTargetResourceUrl")); 1986 if (findVersionFieldName != null) { 1987 selectionList.add(root.get(findVersionFieldName)); 1988 } else { 1989 selectionList.add(cb.nullLiteral(Long.class)); 1990 } 1991 if (myPartitionSettings.isDatabasePartitionMode()) { 1992 selectionList.add(root.get(findPartitionFieldName)); 1993 } else { 1994 selectionList.add(cb.nullLiteral(Integer.class)); 1995 } 1996 query.multiselect(selectionList); 1997 1998 List<Predicate> predicates = new ArrayList<>(); 1999 2000 if (myPartitionSettings.isDatabasePartitionMode()) { 2001 predicates.add( 2002 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 2003 } 2004 2005 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 2006 2007 /* 2008 * We need to set the resource type in 2 cases only: 2009 * 1) we are in $everything mode 2010 * (where we only want to fetch specific resource types, regardless of what is 2011 * available to fetch) 2012 * 2) we are doing revincludes 2013 * 2014 * Technically if the request is a qualified star (e.g. _include=Observation:*) we 2015 * should always be checking the source resource type on the resource link. We don't 2016 * actually index that column though by default, so in order to try and be efficient 2017 * we don't actually include it for includes (but we do for revincludes). This is 2018 * because for an include, it doesn't really make sense to include a different 2019 * resource type than the one you are searching on. 2020 */ 2021 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 2022 // because mySourceResourceType is not part of the HFJ_RES_LINK 2023 // index, this might not be the most optimal performance. 2024 // but it is for an $everything operation (and maybe we should update the index) 2025 predicates.add( 2026 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 2027 } else { 2028 wantResourceType = null; 2029 } 2030 2031 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 2032 // resources 2033 // (e.g. via Provenance, List, or Group) when in an $everything operation 2034 if (myParams != null 2035 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 2036 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 2037 predicates.add(cb.not(root.get("mySourceResourceType") 2038 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 2039 } 2040 2041 if (hasDesiredResourceTypes) { 2042 predicates.add( 2043 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 2044 } 2045 2046 query.where(cb.and(predicates.toArray(new Predicate[0]))); 2047 2048 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 2049 for (Collection<JpaPid> nextPartition : partitions) { 2050 2051 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 2052 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 2053 if (myPartitionSettings.isDatabasePartitionMode()) { 2054 q.setParameter( 2055 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 2056 } 2057 if (wantResourceType != null) { 2058 q.setParameter("want_resource_type", wantResourceType); 2059 } 2060 if (maxCount != null) { 2061 q.setMaxResults(maxCount); 2062 } 2063 if (hasDesiredResourceTypes) { 2064 q.setParameter("desired_target_resource_types", desiredResourceTypes); 2065 } 2066 2067 Set<String> canonicalUrls = null; 2068 2069 try (ScrollableResultsIterator<IncludesRecord> iter = 2070 new ScrollableResultsIterator<>(toScrollableResults(q))) { 2071 IncludesRecord nextRow; 2072 while (iter.hasNext()) { 2073 nextRow = iter.next(); 2074 if (nextRow == null) { 2075 // This can happen if there are outgoing references which are canonical or point to 2076 // other servers 2077 continue; 2078 } 2079 2080 Long version = nextRow.version; 2081 Long resourceId = nextRow.resourceId; 2082 String resourceType = nextRow.resourceType; 2083 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 2084 Integer partitionId = nextRow.partitionId; 2085 2086 if (resourceId != null) { 2087 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 2088 pid.setPartitionId(partitionId); 2089 pidsToInclude.add(pid); 2090 } else if (resourceCanonicalUrl != null) { 2091 if (canonicalUrls == null) { 2092 canonicalUrls = new HashSet<>(); 2093 } 2094 canonicalUrls.add(resourceCanonicalUrl); 2095 } 2096 } 2097 } 2098 2099 if (canonicalUrls != null) { 2100 loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode); 2101 } 2102 } 2103 } 2104 2105 private void loadCanonicalUrls( 2106 RequestDetails theRequestDetails, 2107 Set<String> theCanonicalUrls, 2108 EntityManager theEntityManager, 2109 HashSet<JpaPid> thePidsToInclude, 2110 boolean theReverse) { 2111 StringBuilder sqlBuilder; 2112 CanonicalUrlTargets canonicalUrlTargets = 2113 calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse); 2114 if (canonicalUrlTargets.isEmpty()) { 2115 return; 2116 } 2117 2118 String message = 2119 "Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead."; 2120 firePerformanceWarning(theRequestDetails, message); 2121 2122 List<List<String>> canonicalUrlPartitions = ListUtils.partition( 2123 List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size()); 2124 2125 sqlBuilder = new StringBuilder(); 2126 sqlBuilder.append("SELECT "); 2127 if (myPartitionSettings.isPartitioningEnabled()) { 2128 sqlBuilder.append("i.myPartitionIdValue, "); 2129 } 2130 sqlBuilder.append("i.myResourcePid "); 2131 2132 sqlBuilder.append("FROM ResourceIndexedSearchParamUri i "); 2133 sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) "); 2134 sqlBuilder.append("AND i.myUri IN (:uris)"); 2135 2136 String canonicalResSql = sqlBuilder.toString(); 2137 2138 for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) { 2139 TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class); 2140 canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues); 2141 canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList); 2142 List<Object[]> results = canonicalResIdQuery.getResultList(); 2143 for (var next : results) { 2144 if (next != null) { 2145 Integer partitionId = null; 2146 Long pid; 2147 if (next.length == 1) { 2148 pid = (Long) next[0]; 2149 } else { 2150 partitionId = (Integer) ((Object[]) next)[0]; 2151 pid = (Long) ((Object[]) next)[1]; 2152 } 2153 if (pid != null) { 2154 thePidsToInclude.add(JpaPid.fromId(pid, partitionId)); 2155 } 2156 } 2157 } 2158 } 2159 } 2160 2161 /** 2162 * Calls Performance Trace Hook 2163 * 2164 * @param request the request deatils 2165 * Sends a raw SQL query to the Pointcut for raw SQL queries. 2166 */ 2167 private void callRawSqlHookWithCurrentThreadQueries( 2168 RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) { 2169 SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing(); 2170 HookParams params = new HookParams() 2171 .add(RequestDetails.class, request) 2172 .addIfMatchesType(ServletRequestDetails.class, request) 2173 .add(SqlQueryList.class, capturedQueries); 2174 theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params); 2175 } 2176 2177 @Nullable 2178 private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) { 2179 String targetResourceType = nextInclude.getParamTargetType(); 2180 boolean haveTargetTypesDefinedByParam = param.hasTargets(); 2181 Set<String> targetResourceTypes; 2182 if (targetResourceType != null) { 2183 targetResourceTypes = Set.of(targetResourceType); 2184 } else if (haveTargetTypesDefinedByParam) { 2185 targetResourceTypes = param.getTargets(); 2186 } else { 2187 // all types! 2188 targetResourceTypes = null; 2189 } 2190 return targetResourceTypes; 2191 } 2192 2193 @Nullable 2194 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2195 String theVersionFieldName, 2196 Set<String> theTargetResourceTypes, 2197 boolean theReverse, 2198 RequestDetails theRequest, 2199 RuntimeSearchParam theParam) { 2200 2201 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2202 2203 // If we know for sure that none of the paths involved in this SearchParameter could 2204 // be indexing a canonical 2205 if (Arrays.stream(searchParameterPaths) 2206 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2207 myContext, myResourceName, t, theReverse))) { 2208 return null; 2209 } 2210 2211 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2212 if (theVersionFieldName != null) { 2213 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2214 fieldsToLoadFromSpidxUriTable += ", NULL"; 2215 } 2216 2217 if (myPartitionSettings.isDatabasePartitionMode()) { 2218 if (theReverse) { 2219 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2220 } else { 2221 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2222 } 2223 } 2224 2225 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2226 // But sp_name isn't indexed, so we use hash_identity instead. 2227 CanonicalUrlTargets canonicalUrlTargets = 2228 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2229 if (canonicalUrlTargets.isEmpty()) { 2230 return null; 2231 } 2232 2233 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2234 StringBuilder canonicalUrlQuery = new StringBuilder(); 2235 canonicalUrlQuery 2236 .append("SELECT ") 2237 .append(fieldsToLoadFromSpidxUriTable) 2238 .append(' '); 2239 canonicalUrlQuery.append("FROM hfj_res_link r "); 2240 2241 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2242 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2243 if (myPartitionSettings.isDatabasePartitionMode()) { 2244 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2245 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2246 } 2247 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2248 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2249 canonicalUriQueryParams.put( 2250 "uri_identity_hash", 2251 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2252 } else { 2253 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2254 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2255 } 2256 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2257 canonicalUrlQuery.append(")"); 2258 2259 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2260 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2261 canonicalUrlQuery.append(" AND"); 2262 if (myPartitionSettings.isDatabasePartitionMode()) { 2263 if (theReverse) { 2264 canonicalUrlQuery.append(" rUri.partition_id"); 2265 } else { 2266 canonicalUrlQuery.append(" r.partition_id"); 2267 } 2268 canonicalUrlQuery.append(" = :search_partition_id"); 2269 canonicalUrlQuery.append(" AND"); 2270 } 2271 if (theReverse) { 2272 canonicalUrlQuery.append(" rUri.res_id"); 2273 } else { 2274 canonicalUrlQuery.append(" r.src_resource_id"); 2275 } 2276 canonicalUrlQuery.append(" IN (:target_pids)"); 2277 2278 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2279 } 2280 2281 @Nonnull 2282 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2283 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2284 Set<String> targetResourceTypes = theTargetResourceTypes; 2285 if (targetResourceTypes == null) { 2286 /* 2287 * If we don't have a list of valid target types, we need to figure out a list of all 2288 * possible target types in order to perform the search of the URI index table. This is 2289 * because the hash_identity column encodes the resource type, so we'll need a hash 2290 * value for each possible target type. 2291 */ 2292 targetResourceTypes = new HashSet<>(); 2293 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2294 if (theReverse) { 2295 // For reverse includes, it is really hard to figure out what types 2296 // are actually potentially pointing to the type we're searching for 2297 // in this context, so let's just assume it could be anything. 2298 targetResourceTypes = possibleTypes; 2299 } else { 2300 List<RuntimeSearchParam> params = mySearchParamRegistry 2301 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2302 .values() 2303 .stream() 2304 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2305 .toList(); 2306 for (var next : params) { 2307 2308 String paths = next.getPath(); 2309 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2310 2311 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2312 myContext, myResourceName, path, theReverse)) { 2313 continue; 2314 } 2315 2316 if (!next.getTargets().isEmpty()) { 2317 // For each reference parameter on the resource type we're searching for, 2318 // add all the potential target types to the list of possible target 2319 // resource types we can look up. 2320 for (var nextTarget : next.getTargets()) { 2321 if (possibleTypes.contains(nextTarget)) { 2322 targetResourceTypes.add(nextTarget); 2323 } 2324 } 2325 } else { 2326 // If we have any references that don't define any target types, then 2327 // we need to assume that all enabled resource types are possible target 2328 // types 2329 targetResourceTypes.addAll(possibleTypes); 2330 break; 2331 } 2332 } 2333 } 2334 } 2335 } 2336 2337 if (targetResourceTypes.isEmpty()) { 2338 return new CanonicalUrlTargets(Set.of(), Set.of()); 2339 } 2340 2341 Set<Long> hashIdentityValues = new HashSet<>(); 2342 Set<Integer> partitionIds = new HashSet<>(); 2343 for (String type : targetResourceTypes) { 2344 2345 RequestPartitionId readPartition; 2346 if (myPartitionSettings.isPartitioningEnabled()) { 2347 readPartition = 2348 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2349 } else { 2350 readPartition = RequestPartitionId.defaultPartition(); 2351 } 2352 if (readPartition.hasPartitionIds()) { 2353 partitionIds.addAll(readPartition.getPartitionIds()); 2354 } 2355 2356 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2357 myPartitionSettings, readPartition, type, "url"); 2358 hashIdentityValues.add(hashIdentity); 2359 } 2360 2361 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2362 } 2363 2364 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2365 public boolean isEmpty() { 2366 return hashIdentityValues.isEmpty(); 2367 } 2368 } 2369 2370 /** 2371 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2372 * those pids where: 2373 * <ul> 2374 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2375 * <li>Each list only contains JpaPids with the same partition ID</li> 2376 * </ul> 2377 */ 2378 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2379 2380 if (theNextRoundMatches.size() <= theMaxLoad) { 2381 boolean allSamePartition = true; 2382 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2383 if (!Objects.equals( 2384 theNextRoundMatches.get(i - 1).getPartitionId(), 2385 theNextRoundMatches.get(i).getPartitionId())) { 2386 allSamePartition = false; 2387 break; 2388 } 2389 } 2390 if (allSamePartition) { 2391 return Collections.singletonList(theNextRoundMatches); 2392 } 2393 } 2394 2395 // Break into partitioned sublists 2396 ListMultimap<String, JpaPid> lists = 2397 MultimapBuilder.hashKeys().arrayListValues().build(); 2398 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2399 String partitionId = nextRoundMatch.getPartitionId() != null 2400 ? nextRoundMatch.getPartitionId().toString() 2401 : ""; 2402 lists.put(partitionId, nextRoundMatch); 2403 } 2404 2405 List<Collection<JpaPid>> retVal = new ArrayList<>(); 2406 for (String key : lists.keySet()) { 2407 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad); 2408 retVal.addAll(nextPartition); 2409 } 2410 2411 // In unit test mode, we sort the results just for unit test predictability 2412 if (HapiSystemProperties.isUnitTestModeEnabled()) { 2413 retVal = retVal.stream() 2414 .map(t -> t.stream().sorted().collect(Collectors.toList())) 2415 .collect(Collectors.toList()); 2416 } 2417 2418 return retVal; 2419 } 2420 2421 /** 2422 * If any Combo SearchParameters match the given query parameters, add a predicate 2423 * to {@literal theQueryStack} and remove the parameters from {@literal theParams}. 2424 * This method handles both UNIQUE and NON_UNIQUE combo parameters. 2425 */ 2426 private void attemptComboSearchParameterProcessing( 2427 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 2428 2429 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 2430 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2431 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2432 2433 List<JpaParamUtil.ComponentAndCorrespondingParam> nextCandidateComponents = 2434 JpaParamUtil.resolveCompositeComponents(mySearchParamRegistry, nextCandidate); 2435 2436 /* 2437 * First, a quick and dirty check to see if we have a parameter in the current search 2438 * that contains all the parameters for the candidate combo search parameter. We do 2439 * a more nuanced check later to make sure that the parameters have appropriate values, 2440 * modifiers, etc. so this doesn't need to be perfect in terms of rejecting bad matches. 2441 * It just needs to fail fast if the search couldn't possibly be a match for the 2442 * candidate so we can move on quickly. 2443 */ 2444 boolean noMatch = false; 2445 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : nextCandidateComponents) { 2446 if (!theParams.containsKey(nextComponent.getParamName()) 2447 && !theParams.containsKey(nextComponent.getCombinedParamName())) { 2448 noMatch = true; 2449 break; 2450 } 2451 } 2452 if (noMatch) { 2453 continue; 2454 } 2455 2456 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : nextCandidateComponents) { 2457 ensureSubListsAreWritable(theParams.get(nextComponent.getParamName())); 2458 ensureSubListsAreWritable(theParams.get(nextComponent.getCombinedParamName())); 2459 } 2460 2461 /* 2462 * Apply search against the combo param index in a loop: 2463 * 2464 * 1. First we check whether the actual parameter values in the 2465 * parameter map are actually usable for searching against the combo 2466 * param index. E.g. no search modifiers, date comparators, etc., 2467 * since these mean you can't use the combo index. 2468 * 2469 * 2. Apply and create the join SQl. We remove parameter values from 2470 * the map as we apply them, so any parameter values remaining in the 2471 * map after each loop haven't yet been factored into the SQL. 2472 * 2473 * The loop allows us to create multiple combo index joins if there 2474 * are multiple AND expressions for the related parameters. 2475 */ 2476 boolean matched; 2477 do { 2478 matched = applyComboSearchParamIfAppropriate( 2479 theRequest, theQueryStack, theParams, nextCandidate, nextCandidateComponents); 2480 } while (matched); 2481 } 2482 } 2483 2484 /** 2485 * Attempts to apply a Combo SearchParameter to the current search. Assuming some or all parameters of 2486 * the search are appropriate for the given Combo SearchParameter, a predicate is created and added to 2487 * the QueryStack, and the parameters are removed from the search parameters map. 2488 * 2489 * @param theRequest The RequestDetails for the current search. 2490 * @param theQueryStack The current SQL builder QueryStack to add a predicate to. 2491 * @param theParams The search parameters for the current search. 2492 * @param theComboParam The Combo SearchParameter to apply. 2493 * @param theComboParamComponents The components of the Combo SearchParameter. 2494 * @return Returns <code>true</code> if the Combo SearchParameter was applied successfully. 2495 */ 2496 private boolean applyComboSearchParamIfAppropriate( 2497 RequestDetails theRequest, 2498 QueryStack theQueryStack, 2499 @Nonnull SearchParameterMap theParams, 2500 RuntimeSearchParam theComboParam, 2501 List<JpaParamUtil.ComponentAndCorrespondingParam> theComboParamComponents) { 2502 2503 List<List<IQueryParameterType>> inputs = new ArrayList<>(theComboParamComponents.size()); 2504 List<Runnable> searchParameterConsumerTasks = new ArrayList<>(theComboParamComponents.size()); 2505 for (JpaParamUtil.ComponentAndCorrespondingParam nextComponent : theComboParamComponents) { 2506 boolean foundMatch = false; 2507 2508 /* 2509 * The following List<List<IQueryParameterType>> is a list of query parameters where the 2510 * outer list contains AND combinations, and the inner lists contain OR combinations. 2511 * For each component in the Combo SearchParameter, we need to find a list of OR parameters 2512 * (i.e. the inner List) which is appropriate for the given component. 2513 * 2514 * We can only use a combo param when the query parameter is fairly basic 2515 * (no modifiers such as :missing or :below, references are qualified with 2516 * a resource type, etc.) Once we've confirmed that we have a parameter for 2517 * each component, we remove the components from the source SearchParameterMap 2518 * since we're going to consume them and add a predicate to the SQL builder. 2519 */ 2520 List<List<IQueryParameterType>> sameNameParametersAndList = theParams.get(nextComponent.getParamName()); 2521 if (sameNameParametersAndList != null) { 2522 boolean parameterIsChained = false; 2523 for (int andIndex = 0; andIndex < sameNameParametersAndList.size(); andIndex++) { 2524 List<IQueryParameterType> sameNameParametersOrList = sameNameParametersAndList.get(andIndex); 2525 IQueryParameterType firstValue = sameNameParametersOrList.get(0); 2526 2527 if (firstValue instanceof ReferenceParam refParam) { 2528 if (!Objects.equals(nextComponent.getChain(), refParam.getChain())) { 2529 continue; 2530 } 2531 } 2532 2533 if (!validateParamValuesAreValidForComboParam( 2534 theRequest, theParams, theComboParam, nextComponent, sameNameParametersOrList)) { 2535 continue; 2536 } 2537 2538 inputs.add(sameNameParametersOrList); 2539 searchParameterConsumerTasks.add(() -> sameNameParametersAndList.remove(sameNameParametersOrList)); 2540 foundMatch = true; 2541 break; 2542 } 2543 } else if (!nextComponent.getParamName().equals(nextComponent.getCombinedParamName())) { 2544 2545 /* 2546 * If we didn't find any parameters for the parameter name (e.g. "patient") and 2547 * we're looking for a chained parameter (e.g. "patient.identifier"), check if 2548 * there are any matches for the full combined parameter name 2549 * (e.g. "patient.identifier"). 2550 */ 2551 List<List<IQueryParameterType>> combinedNameParametersAndList = 2552 theParams.get(nextComponent.getCombinedParamName()); 2553 if (combinedNameParametersAndList != null) { 2554 for (int andIndex = 0; andIndex < combinedNameParametersAndList.size(); andIndex++) { 2555 List<IQueryParameterType> combinedNameParametersOrList = 2556 combinedNameParametersAndList.get(andIndex); 2557 if (!combinedNameParametersOrList.isEmpty()) { 2558 2559 if (!validateParamValuesAreValidForComboParam( 2560 theRequest, 2561 theParams, 2562 theComboParam, 2563 nextComponent, 2564 combinedNameParametersOrList)) { 2565 continue; 2566 } 2567 2568 inputs.add(combinedNameParametersOrList); 2569 searchParameterConsumerTasks.add( 2570 () -> combinedNameParametersAndList.remove(combinedNameParametersOrList)); 2571 foundMatch = true; 2572 break; 2573 } 2574 } 2575 } 2576 } 2577 2578 if (!foundMatch) { 2579 return false; 2580 } 2581 } 2582 2583 if (CartesianProductUtil.calculateCartesianProductSize(inputs) > 500) { 2584 ourLog.debug( 2585 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2586 return false; 2587 } 2588 2589 searchParameterConsumerTasks.forEach(Runnable::run); 2590 2591 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2592 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2593 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2594 2595 List<String> parameters = new ArrayList<>(); 2596 for (int paramIndex = 0; paramIndex < theComboParamComponents.size(); paramIndex++) { 2597 2598 JpaParamUtil.ComponentAndCorrespondingParam componentAndCorrespondingParam = 2599 theComboParamComponents.get(paramIndex); 2600 String nextParamName = componentAndCorrespondingParam.getCombinedParamName(); 2601 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2602 2603 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2604 // As a result, we strip the prefix if present. 2605 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(), EQUAL.getValue()); 2606 2607 RestSearchParameterTypeEnum paramType = JpaParamUtil.getParameterTypeForComposite( 2608 mySearchParamRegistry, componentAndCorrespondingParam); 2609 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2610 if (paramType == RestSearchParameterTypeEnum.STRING) { 2611 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2612 } 2613 } 2614 2615 if (paramType == RestSearchParameterTypeEnum.TOKEN) { 2616 2617 /* 2618 * The gender SP indexes a fixed binding ValueSet with a single CodeSystem, so we 2619 * infer the codesystem just to be friendly to clients who don't provide it 2620 * in the search. 2621 */ 2622 if ("gender".equals(componentAndCorrespondingParam.getParamName()) 2623 || "gender".equals(componentAndCorrespondingParam.getChain())) { 2624 if (!nextOrValue.contains("|")) { 2625 nextOrValue = "http://hl7.org/fhir/administrative-gender|" + nextOrValue; 2626 } 2627 } 2628 } 2629 2630 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2631 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2632 2633 parameters.add(nextParamName + "=" + nextOrValue); 2634 } 2635 2636 // Make sure the parameters end up in the search URL in the same order 2637 // we would index them in (we also alphabetically sort when we create 2638 // the index rows) 2639 Collections.sort(parameters); 2640 2641 StringBuilder searchStringBuilder = new StringBuilder(); 2642 searchStringBuilder.append(myResourceName); 2643 for (int i = 0; i < parameters.size(); i++) { 2644 if (i == 0) { 2645 searchStringBuilder.append("?"); 2646 } else { 2647 searchStringBuilder.append("&"); 2648 } 2649 searchStringBuilder.append(parameters.get(i)); 2650 } 2651 2652 String indexString = searchStringBuilder.toString(); 2653 ourLog.debug( 2654 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2655 2656 indexStrings.add(indexString); 2657 } 2658 2659 // Just to make sure we're stable for tests 2660 indexStrings.sort(Comparator.naturalOrder()); 2661 2662 // Interceptor broadcast: JPA_PERFTRACE_INFO 2663 IInterceptorBroadcaster compositeBroadcaster = 2664 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2665 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2666 String indexStringForLog = indexStrings.size() > 1 ? indexStrings.toString() : indexStrings.get(0); 2667 StorageProcessingMessage msg = new StorageProcessingMessage() 2668 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2669 + indexStringForLog); 2670 HookParams params = new HookParams() 2671 .add(RequestDetails.class, theRequest) 2672 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2673 .add(StorageProcessingMessage.class, msg); 2674 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2675 } 2676 2677 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2678 case UNIQUE: 2679 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2680 break; 2681 case NON_UNIQUE: 2682 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2683 break; 2684 } 2685 2686 // Remove any empty parameters remaining after this 2687 theParams.clean(); 2688 2689 return true; 2690 } 2691 2692 /** 2693 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2694 * searching against a combo param with the given parameter names. This might be {@literal false} if 2695 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2696 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2697 */ 2698 private boolean validateParamValuesAreValidForComboParam( 2699 RequestDetails theRequest, 2700 @Nonnull SearchParameterMap theParams, 2701 RuntimeSearchParam theComboParam, 2702 JpaParamUtil.ComponentAndCorrespondingParam theComboComponent, 2703 List<IQueryParameterType> theValues) { 2704 2705 for (IQueryParameterType nextOrValue : theValues) { 2706 if (nextOrValue instanceof DateParam dateParam) { 2707 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2708 String message = "Search with params " + describeParams(theParams) 2709 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2710 + theComboComponent.getCombinedParamName() + "'"; 2711 firePerformanceInfo(theRequest, message); 2712 return false; 2713 } 2714 } 2715 2716 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2717 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2718 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2719 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 2720 if (prefix != null && prefix != EQUAL) { 2721 String message = "Search with params " + describeParams(theParams) 2722 + " is not a candidate for combo searching - Parameter '" 2723 + theComboComponent.getCombinedParamName() 2724 + "' has prefix: '" 2725 + paramWithPrefix.getPrefix().getValue() + "'"; 2726 firePerformanceInfo(theRequest, message); 2727 return false; 2728 } 2729 } 2730 2731 // Reference params are only eligible for using a composite index if they 2732 // are qualified 2733 boolean haveChain = false; 2734 if (nextOrValue instanceof ReferenceParam refParam) { 2735 haveChain = refParam.hasChain(); 2736 if (theComboComponent.getChain() == null && isBlank(refParam.getResourceType())) { 2737 String message = 2738 "Search is not a candidate for unique combo searching - Reference with no type specified for parameter '" 2739 + theComboComponent.getCombinedParamName() + "'"; 2740 firePerformanceInfo(theRequest, message); 2741 return false; 2742 } 2743 } 2744 2745 // Qualifiers such as :missing can't be resolved by a combo param 2746 if (!haveChain && isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2747 String message = "Search with params " + describeParams(theParams) 2748 + " is not a candidate for combo searching - Parameter '" 2749 + theComboComponent.getCombinedParamName() 2750 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2751 firePerformanceInfo(theRequest, message); 2752 return false; 2753 } 2754 2755 // Date params are not eligible for using composite unique index 2756 // as index could contain date with different precision (e.g. DAY, SECOND) 2757 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2758 if (nextOrValue instanceof DateParam) { 2759 ourLog.debug( 2760 "Search with params {} is not a candidate for combo searching - " 2761 + "Unique combo search parameter '{}' has DATE type", 2762 describeParams(theParams), 2763 theComboComponent); 2764 return false; 2765 } 2766 } 2767 } 2768 2769 return true; 2770 } 2771 2772 @Nonnull 2773 private static String describeParams(@Nonnull SearchParameterMap theParams) { 2774 return '[' + theParams.keySet().stream().sorted().collect(Collectors.joining(", ")) + ']'; 2775 } 2776 2777 private <T> void ensureSubListsAreWritable(@Nullable List<List<T>> theListOfLists) { 2778 if (theListOfLists != null) { 2779 for (int i = 0; i < theListOfLists.size(); i++) { 2780 List<T> oldSubList = theListOfLists.get(i); 2781 if (!(oldSubList instanceof ArrayList)) { 2782 List<T> newSubList = new ArrayList<>(oldSubList); 2783 theListOfLists.set(i, newSubList); 2784 } 2785 } 2786 } 2787 } 2788 2789 @Override 2790 public void setFetchSize(int theFetchSize) { 2791 myFetchSize = theFetchSize; 2792 } 2793 2794 public SearchParameterMap getParams() { 2795 return myParams; 2796 } 2797 2798 public CriteriaBuilder getBuilder() { 2799 return myCriteriaBuilder; 2800 } 2801 2802 public Class<? extends IBaseResource> getResourceType() { 2803 return myResourceType; 2804 } 2805 2806 public String getResourceName() { 2807 return myResourceName; 2808 } 2809 2810 /** 2811 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2812 */ 2813 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2814 2815 private final RequestDetails myRequest; 2816 private final Set<JpaPid> myCurrentPids; 2817 private Iterator<JpaPid> myCurrentIterator; 2818 private JpaPid myNext; 2819 2820 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2821 myCurrentPids = new HashSet<>(thePidSet); 2822 myCurrentIterator = null; 2823 myRequest = theRequest; 2824 } 2825 2826 private void fetchNext() { 2827 while (myNext == null) { 2828 2829 if (myCurrentIterator == null) { 2830 Set<Include> includes = new HashSet<>(); 2831 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2832 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2833 for (IQueryParameterType type : typeList) { 2834 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken()); 2835 for (String resourceType : queryString.split(",")) { 2836 String rt = resourceType.trim(); 2837 if (isNotBlank(rt)) { 2838 includes.add(new Include(rt + ":*", true)); 2839 } 2840 } 2841 } 2842 } 2843 } 2844 if (includes.isEmpty()) { 2845 includes.add(new Include("*", true)); 2846 } 2847 Set<JpaPid> newPids = loadIncludes( 2848 myContext, 2849 myEntityManager, 2850 myCurrentPids, 2851 includes, 2852 false, 2853 getParams().getLastUpdated(), 2854 mySearchUuid, 2855 myRequest, 2856 null); 2857 myCurrentIterator = newPids.iterator(); 2858 } 2859 2860 if (myCurrentIterator.hasNext()) { 2861 myNext = myCurrentIterator.next(); 2862 } else { 2863 myNext = NO_MORE; 2864 } 2865 } 2866 } 2867 2868 @Override 2869 public boolean hasNext() { 2870 fetchNext(); 2871 return !NO_MORE.equals(myNext); 2872 } 2873 2874 @Override 2875 public JpaPid next() { 2876 fetchNext(); 2877 JpaPid retVal = myNext; 2878 myNext = null; 2879 return retVal; 2880 } 2881 } 2882 /** 2883 * Basic Query iterator, used to fetch the results of a query. 2884 */ 2885 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2886 2887 private final SearchRuntimeDetails mySearchRuntimeDetails; 2888 2889 private final RequestDetails myRequest; 2890 private final boolean myHaveRawSqlHooks; 2891 private final boolean myHavePerfTraceFoundIdHook; 2892 private final Integer myOffset; 2893 private final IInterceptorBroadcaster myCompositeBroadcaster; 2894 private boolean myFirst = true; 2895 private IncludesIterator myIncludesIterator; 2896 /** 2897 * The next JpaPid value of the next result in this query. 2898 * Will not be null if fetched using getNext() 2899 */ 2900 private JpaPid myNext; 2901 /** 2902 * The current query result iterator running sql and supplying PIDs 2903 * @see #myQueryList 2904 */ 2905 private ISearchQueryExecutor myResultsIterator; 2906 2907 private boolean myFetchIncludesForEverythingOperation; 2908 2909 /** 2910 * The count of resources skipped because they were seen in earlier results 2911 */ 2912 private int mySkipCount = 0; 2913 /** 2914 * The count of resources that are new in this search 2915 * (ie, not cached in previous searches) 2916 */ 2917 private int myNonSkipCount = 0; 2918 /** 2919 * The list of queries to use to find all results. 2920 * Normal JPA queries will normally have a single entry. 2921 * Queries that involve Hibernate Search/Elasticsearch may have 2922 * multiple queries because of chunking. 2923 * The $everything operation also jams some extra results in. 2924 */ 2925 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2926 2927 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2928 mySearchRuntimeDetails = theSearchRuntimeDetails; 2929 myOffset = myParams.getOffset(); 2930 myRequest = theRequest; 2931 myCompositeBroadcaster = 2932 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2933 2934 // everything requires fetching recursively all related resources 2935 if (myParams.getEverythingMode() != null) { 2936 myFetchIncludesForEverythingOperation = true; 2937 } 2938 2939 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2940 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2941 } 2942 2943 private void fetchNext() { 2944 try { 2945 if (myHaveRawSqlHooks) { 2946 CurrentThreadCaptureQueriesListener.startCapturing(); 2947 } 2948 2949 // If we don't have a query yet, create one 2950 if (myResultsIterator == null) { 2951 if (!mySearchProperties.hasMaxResultsRequested()) { 2952 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2953 } 2954 2955 /* 2956 * assigns the results iterator 2957 * and populates the myQueryList. 2958 */ 2959 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2960 } 2961 2962 if (myNext == null) { 2963 // no next means we need a new query (if one is available) 2964 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2965 /* 2966 * Because we combine our DB searches with Lucene 2967 * sometimes we can have multiple results iterators 2968 * (with only some having data in them to extract). 2969 * 2970 * We'll iterate our results iterators until we 2971 * either run out of results iterators, or we 2972 * have one that actually has data in it. 2973 */ 2974 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2975 retrieveNextIteratorQuery(); 2976 } 2977 2978 if (!myResultsIterator.hasNext()) { 2979 // we couldn't find a results iterator; 2980 // we're done here 2981 break; 2982 } 2983 2984 JpaPid nextPid = myResultsIterator.next(); 2985 if (myHavePerfTraceFoundIdHook) { 2986 callPerformanceTracingHook(nextPid); 2987 } 2988 2989 if (nextPid != null) { 2990 if (!myPidSet.contains(nextPid)) { 2991 if (!mySearchProperties.isDeduplicateInDatabase()) { 2992 /* 2993 * We only add to the map if we aren't fetching "everything"; 2994 * otherwise, we let the de-duplication happen in the database 2995 * (see createChunkedQueryNormalSearch above), because it 2996 * saves memory that way. 2997 */ 2998 myPidSet.add(nextPid); 2999 } 3000 if (doNotSkipNextPidForEverything()) { 3001 myNext = nextPid; 3002 myNonSkipCount++; 3003 break; 3004 } 3005 } else { 3006 mySkipCount++; 3007 } 3008 } 3009 3010 if (!myResultsIterator.hasNext()) { 3011 if (mySearchProperties.hasMaxResultsRequested() 3012 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 3013 if (mySkipCount > 0 && myNonSkipCount == 0) { 3014 sendProcessingMsgAndFirePerformanceHook(); 3015 // need the next iterator; increase the maxsize 3016 // (we should always do this) 3017 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 3018 mySearchProperties.setMaxResultsRequested(maxResults); 3019 3020 if (!mySearchProperties.isDeduplicateInDatabase()) { 3021 // if we're not using the database to deduplicate 3022 // we should recheck our memory usage 3023 // the prefetch size check is future proofing 3024 int prefetchSize = myStorageSettings 3025 .getSearchPreFetchThresholds() 3026 .size(); 3027 if (prefetchSize > 0) { 3028 if (myStorageSettings 3029 .getSearchPreFetchThresholds() 3030 .get(prefetchSize - 1) 3031 < mySearchProperties.getMaxResultsRequested()) { 3032 mySearchProperties.setDeduplicateInDatabase(true); 3033 } 3034 } 3035 } 3036 3037 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 3038 } 3039 } 3040 } 3041 } 3042 } 3043 3044 if (myNext == null) { 3045 // if we got here, it means the current JpaPid has already been processed, 3046 // and we will decide (here) if we need to fetch related resources recursively 3047 if (myFetchIncludesForEverythingOperation) { 3048 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 3049 myFetchIncludesForEverythingOperation = false; 3050 } 3051 if (myIncludesIterator != null) { 3052 while (myIncludesIterator.hasNext()) { 3053 JpaPid next = myIncludesIterator.next(); 3054 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 3055 myNext = next; 3056 break; 3057 } 3058 } 3059 if (myNext == null) { 3060 myNext = NO_MORE; 3061 } 3062 } else { 3063 myNext = NO_MORE; 3064 } 3065 } 3066 3067 if (!mySearchProperties.hasMaxResultsRequested()) { 3068 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 3069 } else { 3070 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 3071 } 3072 3073 } finally { 3074 // search finished - fire hooks 3075 if (myHaveRawSqlHooks) { 3076 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 3077 } 3078 } 3079 3080 if (myFirst) { 3081 HookParams params = new HookParams() 3082 .add(RequestDetails.class, myRequest) 3083 .addIfMatchesType(ServletRequestDetails.class, myRequest) 3084 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 3085 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 3086 myFirst = false; 3087 } 3088 3089 if (NO_MORE.equals(myNext)) { 3090 HookParams params = new HookParams() 3091 .add(RequestDetails.class, myRequest) 3092 .addIfMatchesType(ServletRequestDetails.class, myRequest) 3093 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 3094 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 3095 } 3096 } 3097 3098 private Integer calculateMaxResultsToFetch() { 3099 if (myParams.isLoadSynchronous()) { 3100 // this might be null - we support this for streaming. 3101 return myParams.getLoadSynchronousUpTo(); 3102 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 3103 return myParams.getEverythingMode() != null 3104 ? myParams.getOffset() + myParams.getCount() 3105 : myParams.getCount(); 3106 } else { 3107 return myStorageSettings.getFetchSizeDefaultMaximum(); 3108 } 3109 } 3110 3111 private boolean doNotSkipNextPidForEverything() { 3112 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 3113 } 3114 3115 private void callPerformanceTracingHook(JpaPid theNextPid) { 3116 HookParams params = new HookParams() 3117 .add(Integer.class, System.identityHashCode(this)) 3118 .add(Object.class, theNextPid); 3119 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 3120 } 3121 3122 private void sendProcessingMsgAndFirePerformanceHook() { 3123 String msg = "Pass completed with no matching results seeking rows " 3124 + myPidSet.size() + "-" + mySkipCount 3125 + ". This indicates an inefficient query! Retrying with new max count of " 3126 + mySearchProperties.getMaxResultsRequested(); 3127 firePerformanceWarning(myRequest, msg); 3128 } 3129 3130 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 3131 Integer offset = theOffset; 3132 if (myQueryList.isEmpty()) { 3133 // Capture times for Lucene/Elasticsearch queries as well 3134 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3135 3136 // setting offset to 0 to fetch all resource ids to guarantee 3137 // correct output result for everything operation during paging 3138 if (myParams.getEverythingMode() != null) { 3139 offset = 0; 3140 } 3141 3142 SearchQueryProperties properties = mySearchProperties.clone(); 3143 properties 3144 .setOffset(offset) 3145 .setMaxResultsRequested(theMaxResultsToFetch) 3146 .setDoCountOnlyFlag(false) 3147 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 3148 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 3149 } 3150 3151 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 3152 3153 retrieveNextIteratorQuery(); 3154 3155 mySkipCount = 0; 3156 myNonSkipCount = 0; 3157 } 3158 3159 private void retrieveNextIteratorQuery() { 3160 close(); 3161 if (isNotEmpty(myQueryList)) { 3162 myResultsIterator = myQueryList.remove(0); 3163 myHasNextIteratorQuery = true; 3164 } else { 3165 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 3166 myHasNextIteratorQuery = false; 3167 } 3168 } 3169 3170 @Override 3171 public boolean hasNext() { 3172 if (myNext == null) { 3173 fetchNext(); 3174 } 3175 return !NO_MORE.equals(myNext); 3176 } 3177 3178 @Override 3179 public JpaPid next() { 3180 fetchNext(); 3181 JpaPid retVal = myNext; 3182 myNext = null; 3183 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 3184 return retVal; 3185 } 3186 3187 @Override 3188 public int getSkippedCount() { 3189 return mySkipCount; 3190 } 3191 3192 @Override 3193 public int getNonSkippedCount() { 3194 return myNonSkipCount; 3195 } 3196 3197 @Override 3198 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 3199 Collection<JpaPid> batch = new ArrayList<>(); 3200 while (this.hasNext() && batch.size() < theBatchSize) { 3201 batch.add(this.next()); 3202 } 3203 return batch; 3204 } 3205 3206 @Override 3207 public void close() { 3208 if (myResultsIterator != null) { 3209 myResultsIterator.close(); 3210 } 3211 myResultsIterator = null; 3212 } 3213 } 3214 3215 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 3216 // Only log at debug level since these messages aren't considered important enough 3217 // that we should be cluttering the system log, but they are important to the 3218 // specific query being executed to we'll INFO level them there 3219 ourLog.debug(theMessage); 3220 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO); 3221 } 3222 3223 private void firePerformanceWarning(RequestDetails theRequest, String theMessage) { 3224 ourLog.warn(theMessage); 3225 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING); 3226 } 3227 3228 private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) { 3229 IInterceptorBroadcaster compositeBroadcaster = 3230 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 3231 if (compositeBroadcaster.hasHooks(thePointcut)) { 3232 StorageProcessingMessage message = new StorageProcessingMessage(); 3233 message.setMessage(theMessage); 3234 HookParams params = new HookParams() 3235 .add(RequestDetails.class, theRequest) 3236 .addIfMatchesType(ServletRequestDetails.class, theRequest) 3237 .add(StorageProcessingMessage.class, message); 3238 compositeBroadcaster.callHooks(thePointcut, params); 3239 } 3240 } 3241 3242 public static int getMaximumPageSize() { 3243 if (myMaxPageSizeForTests != null) { 3244 return myMaxPageSizeForTests; 3245 } 3246 return MAXIMUM_PAGE_SIZE; 3247 } 3248 3249 public static void setMaxPageSizeForTest(Integer theTestSize) { 3250 myMaxPageSizeForTests = theTestSize; 3251 } 3252 3253 private static ScrollableResults<?> toScrollableResults(Query theQuery) { 3254 org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery; 3255 return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY); 3256 } 3257}