
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.ComboSearchParamType; 023import ca.uhn.fhir.context.FhirContext; 024import ca.uhn.fhir.context.FhirVersionEnum; 025import ca.uhn.fhir.context.RuntimeResourceDefinition; 026import ca.uhn.fhir.context.RuntimeSearchParam; 027import ca.uhn.fhir.i18n.Msg; 028import ca.uhn.fhir.interceptor.api.HookParams; 029import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 030import ca.uhn.fhir.interceptor.api.Pointcut; 031import ca.uhn.fhir.interceptor.model.RequestPartitionId; 032import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 033import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 034import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 035import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 036import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 037import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 038import ca.uhn.fhir.jpa.dao.BaseStorageDao; 039import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 040import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 041import ca.uhn.fhir.jpa.dao.IResultIterator; 042import ca.uhn.fhir.jpa.dao.ISearchBuilder; 043import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 044import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 045import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 046import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 047import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 048import ca.uhn.fhir.jpa.model.config.PartitionSettings; 049import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 050import ca.uhn.fhir.jpa.model.dao.JpaPid; 051import ca.uhn.fhir.jpa.model.dao.JpaPidFk; 052import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 053import ca.uhn.fhir.jpa.model.entity.BaseTag; 054import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 055import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 056import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 057import ca.uhn.fhir.jpa.model.entity.ResourceLink; 058import ca.uhn.fhir.jpa.model.entity.ResourceTag; 059import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 060import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 061import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 062import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 063import ca.uhn.fhir.jpa.search.SearchConstants; 064import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 065import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 066import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 067import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 069import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 070import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 
071import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 072import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 073import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 074import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 075import ca.uhn.fhir.jpa.util.BaseIterator; 076import ca.uhn.fhir.jpa.util.CartesianProductUtil; 077import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 078import ca.uhn.fhir.jpa.util.QueryChunker; 079import ca.uhn.fhir.jpa.util.ScrollableResultsIterator; 080import ca.uhn.fhir.jpa.util.SqlQueryList; 081import ca.uhn.fhir.model.api.IQueryParameterType; 082import ca.uhn.fhir.model.api.Include; 083import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 084import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 085import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 086import ca.uhn.fhir.rest.api.Constants; 087import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 088import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 089import ca.uhn.fhir.rest.api.SortOrderEnum; 090import ca.uhn.fhir.rest.api.SortSpec; 091import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 092import ca.uhn.fhir.rest.api.server.RequestDetails; 093import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 094import ca.uhn.fhir.rest.param.DateParam; 095import ca.uhn.fhir.rest.param.DateRangeParam; 096import ca.uhn.fhir.rest.param.ParamPrefixEnum; 097import ca.uhn.fhir.rest.param.ParameterUtil; 098import ca.uhn.fhir.rest.param.ReferenceParam; 099import ca.uhn.fhir.rest.param.StringParam; 100import ca.uhn.fhir.rest.param.TokenParam; 101import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 102import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 103import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 104import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 105import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 106import ca.uhn.fhir.system.HapiSystemProperties; 107import ca.uhn.fhir.util.SearchParameterUtil; 108import ca.uhn.fhir.util.StopWatch; 109import ca.uhn.fhir.util.StringUtil; 110import ca.uhn.fhir.util.UrlUtil; 111import com.google.common.annotations.VisibleForTesting; 112import com.google.common.collect.ListMultimap; 113import com.google.common.collect.Lists; 114import com.google.common.collect.MultimapBuilder; 115import com.healthmarketscience.sqlbuilder.Condition; 116import jakarta.annotation.Nonnull; 117import jakarta.annotation.Nullable; 118import jakarta.persistence.EntityManager; 119import jakarta.persistence.PersistenceContext; 120import jakarta.persistence.PersistenceContextType; 121import jakarta.persistence.Query; 122import jakarta.persistence.Tuple; 123import jakarta.persistence.TypedQuery; 124import jakarta.persistence.criteria.CriteriaBuilder; 125import jakarta.persistence.criteria.CriteriaQuery; 126import jakarta.persistence.criteria.Predicate; 127import jakarta.persistence.criteria.Root; 128import jakarta.persistence.criteria.Selection; 129import org.apache.commons.collections4.ListUtils; 130import org.apache.commons.lang3.StringUtils; 131import org.apache.commons.lang3.Validate; 132import org.apache.commons.lang3.math.NumberUtils; 133import org.apache.commons.lang3.tuple.Pair; 134import org.hibernate.ScrollMode; 135import org.hibernate.ScrollableResults; 136import org.hl7.fhir.instance.model.api.IAnyResource; 137import org.hl7.fhir.instance.model.api.IBaseResource; 138import org.hl7.fhir.instance.model.api.IIdType; 139import org.slf4j.Logger; 140import org.slf4j.LoggerFactory; 141import 
org.springframework.beans.factory.annotation.Autowired; 142import org.springframework.jdbc.core.JdbcTemplate; 143import org.springframework.transaction.support.TransactionSynchronizationManager; 144 145import java.util.ArrayList; 146import java.util.Arrays; 147import java.util.Collection; 148import java.util.Collections; 149import java.util.Comparator; 150import java.util.HashMap; 151import java.util.HashSet; 152import java.util.Iterator; 153import java.util.LinkedList; 154import java.util.List; 155import java.util.Map; 156import java.util.Objects; 157import java.util.Set; 158import java.util.stream.Collectors; 159 160import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 161import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 162import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 163import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 164import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 165import static java.util.Objects.requireNonNull; 166import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 167import static org.apache.commons.lang3.StringUtils.isBlank; 168import static org.apache.commons.lang3.StringUtils.isNotBlank; 169import static org.apache.commons.lang3.StringUtils.stripStart; 170 171/** 172 * The SearchBuilder is responsible for actually forming the SQL query that handles 173 * searches for resources 174 */ 175public class SearchBuilder implements ISearchBuilder<JpaPid> { 176 177 /** 178 * See loadResourcesByPid 179 * for an explanation of why we use the constant 800 180 */ 181 // NB: keep public 182 @Deprecated 183 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 184 185 public static final String RESOURCE_ID_ALIAS = "resource_id"; 186 public static final String PARTITION_ID_ALIAS = "partition_id"; 187 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 188 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 189 private static final JpaPid NO_MORE = JpaPid.fromId(-1L); 190 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 191 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 192 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 193 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 194 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 195 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 196 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 197 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 198 public static boolean myUseMaxPageSize50ForTest = false; 199 public static Integer myMaxPageSizeForTests = null; 200 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 201 protected final IResourceTagDao myResourceTagDao; 202 private String myResourceName; 203 private final Class<? 
extends IBaseResource> myResourceType; 204 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 205 private final SqlObjectFactory mySqlBuilderFactory; 206 private final HibernatePropertiesProvider myDialectProvider; 207 private final ISearchParamRegistry mySearchParamRegistry; 208 private final PartitionSettings myPartitionSettings; 209 private final DaoRegistry myDaoRegistry; 210 private final FhirContext myContext; 211 private final IIdHelperService<JpaPid> myIdHelperService; 212 private final JpaStorageSettings myStorageSettings; 213 private final SearchQueryProperties mySearchProperties; 214 private final IResourceHistoryTableDao myResourceHistoryTableDao; 215 private final IJpaStorageResourceParser myJpaStorageResourceParser; 216 217 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 218 protected EntityManager myEntityManager; 219 220 private CriteriaBuilder myCriteriaBuilder; 221 private SearchParameterMap myParams; 222 private String mySearchUuid; 223 private int myFetchSize; 224 225 private boolean myRequiresTotal; 226 227 /** 228 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 229 */ 230 private Set<JpaPid> myPidSet; 231 232 private boolean myHasNextIteratorQuery = false; 233 private RequestPartitionId myRequestPartitionId; 234 235 private IFulltextSearchSvc myFulltextSearchSvc; 236 237 @Autowired(required = false) 238 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 239 myFulltextSearchSvc = theFulltextSearchSvc; 240 } 241 242 @Autowired(required = false) 243 private IElasticsearchSvc myIElasticsearchSvc; 244 245 @Autowired 246 private IResourceHistoryTagDao myResourceHistoryTagDao; 247 248 @Autowired 249 private IRequestPartitionHelperSvc myPartitionHelperSvc; 250 251 /** 252 * Constructor 253 */ 254 @SuppressWarnings({"rawtypes", "unchecked"}) 255 public SearchBuilder( 256 String theResourceName, 257 JpaStorageSettings theStorageSettings, 258 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 259 SqlObjectFactory theSqlBuilderFactory, 260 HibernatePropertiesProvider theDialectProvider, 261 ISearchParamRegistry theSearchParamRegistry, 262 PartitionSettings thePartitionSettings, 263 IInterceptorBroadcaster theInterceptorBroadcaster, 264 IResourceTagDao theResourceTagDao, 265 DaoRegistry theDaoRegistry, 266 FhirContext theContext, 267 IIdHelperService theIdHelperService, 268 IResourceHistoryTableDao theResourceHistoryTagDao, 269 IJpaStorageResourceParser theIJpaStorageResourceParser, 270 Class<? 
extends IBaseResource> theResourceType) { 271 myResourceName = theResourceName; 272 myResourceType = theResourceType; 273 myStorageSettings = theStorageSettings; 274 275 myEntityManagerFactory = theEntityManagerFactory; 276 mySqlBuilderFactory = theSqlBuilderFactory; 277 myDialectProvider = theDialectProvider; 278 mySearchParamRegistry = theSearchParamRegistry; 279 myPartitionSettings = thePartitionSettings; 280 myInterceptorBroadcaster = theInterceptorBroadcaster; 281 myResourceTagDao = theResourceTagDao; 282 myDaoRegistry = theDaoRegistry; 283 myContext = theContext; 284 myIdHelperService = theIdHelperService; 285 myResourceHistoryTableDao = theResourceHistoryTagDao; 286 myJpaStorageResourceParser = theIJpaStorageResourceParser; 287 288 mySearchProperties = new SearchQueryProperties(); 289 } 290 291 @VisibleForTesting 292 void setResourceName(String theName) { 293 myResourceName = theName; 294 } 295 296 @Override 297 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 298 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 299 } 300 301 @Override 302 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 303 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 304 } 305 306 @Override 307 public void setRequireTotal(boolean theRequireTotal) { 308 myRequiresTotal = theRequireTotal; 309 } 310 311 @Override 312 public boolean requiresTotal() { 313 return myRequiresTotal; 314 } 315 316 private void searchForIdsWithAndOr( 317 SearchQueryBuilder theSearchSqlBuilder, 318 QueryStack theQueryStack, 319 @Nonnull SearchParameterMap theParams, 320 RequestDetails theRequest) { 321 myParams = theParams; 322 mySearchProperties.setSortSpec(myParams.getSort()); 323 324 // Remove any empty parameters 325 theParams.clean(); 326 327 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 328 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 329 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 330 } 331 332 // Attempt to lookup via composite unique key. 
333 if (isCompositeUniqueSpCandidate()) { 334 attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest); 335 } 336 337 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 338 List<String> paramNames = myParams.keySet().stream() 339 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 340 .filter(t -> !t.equals(Constants.PARAM_TAG)) 341 .collect(Collectors.toList()); 342 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 343 paramNames.add(IAnyResource.SP_RES_ID); 344 } 345 if (myParams.containsKey(Constants.PARAM_TAG)) { 346 paramNames.add(Constants.PARAM_TAG); 347 } 348 349 // Handle each parameter 350 for (String nextParamName : paramNames) { 351 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 352 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 353 // Elasticsearch 354 continue; 355 } 356 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 357 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 358 .setParamName(nextParamName) 359 .setAndOrParams(andOrParams) 360 .setRequest(theRequest) 361 .setRequestPartitionId(myRequestPartitionId)); 362 if (predicate != null) { 363 theSearchSqlBuilder.addPredicate(predicate); 364 } 365 } 366 } 367 368 /** 369 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the 370 * parameters all have no modifiers. 371 */ 372 private boolean isCompositeUniqueSpCandidate() { 373 return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null; 374 } 375 376 @SuppressWarnings("ConstantConditions") 377 @Override 378 public Long createCountQuery( 379 SearchParameterMap theParams, 380 String theSearchUuid, 381 RequestDetails theRequest, 382 @Nonnull RequestPartitionId theRequestPartitionId) { 383 384 assert theRequestPartitionId != null; 385 assert TransactionSynchronizationManager.isActualTransactionActive(); 386 387 init(theParams, theSearchUuid, theRequestPartitionId); 388 389 if (checkUseHibernateSearch()) { 390 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 391 } 392 393 SearchQueryProperties properties = mySearchProperties.clone(); 394 properties.setDoCountOnlyFlag(true); 395 properties.setSortSpec(null); // counts don't require sorts 396 properties.setMaxResultsRequested(null); 397 properties.setOffset(null); 398 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 399 if (queries.isEmpty()) { 400 return 0L; 401 } else { 402 JpaPid jpaPid = queries.get(0).next(); 403 return jpaPid.getId(); 404 } 405 } 406 407 /** 408 * @param thePidSet May be null 409 */ 410 @Override 411 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 412 myPidSet = new HashSet<>(thePidSet); 413 } 414 415 @SuppressWarnings("ConstantConditions") 416 @Override 417 public IResultIterator<JpaPid> createQuery( 418 SearchParameterMap theParams, 419 SearchRuntimeDetails theSearchRuntimeDetails, 420 RequestDetails theRequest, 421 @Nonnull RequestPartitionId theRequestPartitionId) { 422 assert theRequestPartitionId != null; 423 assert TransactionSynchronizationManager.isActualTransactionActive(); 424 425 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 426 427 if (myPidSet == null) { 428 myPidSet = new HashSet<>(); 429 } 430 431 return new QueryIterator(theSearchRuntimeDetails, 
theRequest); 432 } 433 434 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 435 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 436 // we mutate the params. Make a private copy. 437 myParams = theParams.clone(); 438 mySearchProperties.setSortSpec(myParams.getSort()); 439 mySearchUuid = theSearchUuid; 440 myRequestPartitionId = theRequestPartitionId; 441 } 442 443 /** 444 * The query created can be either a count query or the 445 * actual query. 446 * This is why it takes a SearchQueryProperties object 447 * (and doesn't use the local version of it). 448 * The properties may differ slightly for whichever 449 * query this is. 450 */ 451 private List<ISearchQueryExecutor> createQuery( 452 SearchParameterMap theParams, 453 SearchQueryProperties theSearchProperties, 454 RequestDetails theRequest, 455 SearchRuntimeDetails theSearchRuntimeDetails) { 456 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 457 458 if (checkUseHibernateSearch()) { 459 // we're going to run at least part of the search against the Fulltext service. 460 461 // Ugh - we have two different return types for now 462 ISearchQueryExecutor fulltextExecutor = null; 463 List<JpaPid> fulltextMatchIds = null; 464 int resultCount = 0; 465 if (myParams.isLastN()) { 466 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 467 resultCount = fulltextMatchIds.size(); 468 } else if (myParams.getEverythingMode() != null) { 469 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 470 resultCount = fulltextMatchIds.size(); 471 } else { 472 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 473 // enabled SP indexing). 474 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 475 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 476 // extra work in Elastic. 477 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 478 479 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 480 // a page. 481 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 482 } 483 484 if (fulltextExecutor == null) { 485 fulltextExecutor = 486 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 487 } 488 489 if (theSearchRuntimeDetails != null) { 490 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 491 IInterceptorBroadcaster compositeBroadcaster = 492 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 493 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 494 HookParams params = new HookParams() 495 .add(RequestDetails.class, theRequest) 496 .addIfMatchesType(ServletRequestDetails.class, theRequest) 497 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 498 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 499 } 500 } 501 502 // can we skip the database entirely and return the pid list from here? 503 boolean canSkipDatabase = 504 // if we processed an AND clause, and it returned nothing, then nothing can match. 
505 !fulltextExecutor.hasNext() 506 || 507 // Our hibernate search query doesn't respect partitions yet 508 (!myPartitionSettings.isPartitioningEnabled() 509 && 510 // were there AND terms left? Then we still need the db. 511 theParams.isEmpty() 512 && 513 // not every param is a param. :-( 514 theParams.getNearDistanceParam() == null 515 && 516 // todo MB don't we support _lastUpdated and _offset now? 517 theParams.getLastUpdated() == null 518 && theParams.getEverythingMode() == null 519 && theParams.getOffset() == null); 520 521 if (canSkipDatabase) { 522 ourLog.trace("Query finished after HSearch. Skip db query phase"); 523 if (theSearchProperties.hasMaxResultsRequested()) { 524 fulltextExecutor = SearchQueryExecutors.limited( 525 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 526 } 527 queries.add(fulltextExecutor); 528 } else { 529 ourLog.trace("Query needs db after HSearch. Chunking."); 530 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 531 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 532 QueryChunker.chunk( 533 fulltextExecutor, 534 SearchBuilder.getMaximumPageSize(), 535 // for each list of (SearchBuilder.getMaximumPageSize()) 536 // we create a chunked query and add it to 'queries' 537 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 538 } 539 } else { 540 // do everything in the database. 541 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 542 } 543 544 return queries; 545 } 546 547 /** 548 * Check to see if query should use Hibernate Search, and error if the query can't continue. 549 * 550 * @return true if the query should first be processed by Hibernate Search 551 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 552 */ 553 private boolean checkUseHibernateSearch() { 554 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 555 556 if (!fulltextEnabled) { 557 failIfUsed(Constants.PARAM_TEXT); 558 failIfUsed(Constants.PARAM_CONTENT); 559 } else { 560 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 561 final String paramName = sortSpec.getParamName(); 562 if (paramName.contains(".")) { 563 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 564 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 565 } 566 } 567 } 568 569 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 570 // can. 
571 return fulltextEnabled 572 && myParams != null 573 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 574 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 575 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 576 } 577 578 private void failIfUsed(String theParamName) { 579 if (myParams.containsKey(theParamName)) { 580 throw new InvalidRequestException(Msg.code(1192) 581 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 582 } 583 } 584 585 private void failIfUsedWithChainedSort(String theParamName) { 586 if (myParams.containsKey(theParamName)) { 587 throw new InvalidRequestException(Msg.code(2524) 588 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 589 + theParamName); 590 } 591 } 592 593 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 594 // Can we use our hibernate search generated index on resource to support lastN?: 595 if (myStorageSettings.isHibernateSearchIndexSearchParams()) { 596 if (myFulltextSearchSvc == null) { 597 throw new InvalidRequestException(Msg.code(2027) 598 + "LastN operation is not enabled on this service, can not process this request"); 599 } 600 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 601 .map(t -> (JpaPid) t) 602 .collect(Collectors.toList()); 603 } else { 604 throw new InvalidRequestException( 605 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 606 } 607 } 608 609 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 610 JpaPid pid = null; 611 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 612 String idParamValue; 613 IQueryParameterType idParam = 614 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 615 if (idParam instanceof TokenParam idParm) { 616 idParamValue = idParm.getValue(); 617 } else { 618 StringParam idParm = (StringParam) idParam; 619 idParamValue = idParm.getValue(); 620 } 621 622 pid = myIdHelperService 623 .resolveResourceIdentity( 624 myRequestPartitionId, 625 myResourceName, 626 idParamValue, 627 ResolveIdentityMode.includeDeleted().cacheOk()) 628 .getPersistentId(); 629 } 630 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 631 } 632 633 private void doCreateChunkedQueries( 634 SearchParameterMap theParams, 635 List<JpaPid> thePids, 636 SearchQueryProperties theSearchQueryProperties, 637 RequestDetails theRequest, 638 ArrayList<ISearchQueryExecutor> theQueries) { 639 640 if (thePids.size() < getMaximumPageSize()) { 641 thePids = normalizeIdListForInClause(thePids); 642 } 643 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 644 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 645 } 646 647 /** 648 * Combs through the params for any _id parameters and extracts the PIDs for them 649 */ 650 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 651 // get all the IQueryParameterType objects 652 // for _id -> these should all be StringParam values 653 HashSet<IIdType> ids = new HashSet<>(); 654 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 655 for (List<IQueryParameterType> paramList : params) { 656 for (IQueryParameterType param : paramList) { 657 String id; 658 if (param instanceof StringParam) { 659 // we expect all _id values to be StringParams 660 id = 
((StringParam) param).getValue(); 661 } else if (param instanceof TokenParam) { 662 id = ((TokenParam) param).getValue(); 663 } else { 664 // we do not expect the _id parameter to be a non-string value 665 throw new IllegalArgumentException( 666 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 667 } 668 669 IIdType idType = myContext.getVersion().newIdType(); 670 if (id.contains("/")) { 671 idType.setValue(id); 672 } else { 673 idType.setValue(myResourceName + "/" + id); 674 } 675 ids.add(idType); 676 } 677 } 678 679 // fetch our target Pids 680 // this will throw if an id is not found 681 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 682 myRequestPartitionId, 683 new ArrayList<>(ids), 684 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 685 686 // add the pids to targetPids 687 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 688 theTargetPids.add(pid.getPersistentId()); 689 } 690 } 691 692 private void createChunkedQuery( 693 SearchParameterMap theParams, 694 SearchQueryProperties theSearchProperties, 695 RequestDetails theRequest, 696 List<JpaPid> thePidList, 697 List<ISearchQueryExecutor> theSearchQueryExecutors) { 698 if (myParams.getEverythingMode() != null) { 699 createChunkedQueryForEverythingSearch( 700 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 701 } else { 702 createChunkedQueryNormalSearch( 703 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 704 } 705 } 706 707 private void createChunkedQueryNormalSearch( 708 SearchParameterMap theParams, 709 SearchQueryProperties theSearchProperties, 710 RequestDetails theRequest, 711 List<JpaPid> thePidList, 712 List<ISearchQueryExecutor> theSearchQueryExecutors) { 713 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 714 myContext, 715 myStorageSettings, 716 myPartitionSettings, 717 myRequestPartitionId, 718 myResourceName, 719 mySqlBuilderFactory, 720 myDialectProvider, 721 theSearchProperties.isDoCountOnlyFlag()); 722 QueryStack queryStack3 = new QueryStack( 723 theRequest, 724 theParams, 725 myStorageSettings, 726 myContext, 727 sqlBuilder, 728 mySearchParamRegistry, 729 myPartitionSettings); 730 731 if (theParams.keySet().size() > 1 732 || theParams.getSort() != null 733 || theParams.keySet().contains(Constants.PARAM_HAS) 734 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 735 List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 736 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 737 if (activeComboParams.isEmpty()) { 738 sqlBuilder.setNeedResourceTableRoot(true); 739 } 740 } 741 742 /* 743 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 744 * specific filters with ORs as their root from working around the natural resource type / deletion 745 * status / partition IDs built into queries. 746 */ 747 if (theParams.containsKey(Constants.PARAM_FILTER)) { 748 Condition partitionIdPredicate = sqlBuilder 749 .getOrCreateResourceTablePredicateBuilder() 750 .createPartitionIdPredicate(myRequestPartitionId); 751 if (partitionIdPredicate != null) { 752 sqlBuilder.addPredicate(partitionIdPredicate); 753 } 754 } 755 756 // Normal search 757 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 758 759 // If we haven't added any predicates yet, we're doing a search for all resources. 
Make sure we add the 760 // partition ID predicate in that case. 761 if (!sqlBuilder.haveAtLeastOnePredicate()) { 762 Condition partitionIdPredicate = sqlBuilder 763 .getOrCreateResourceTablePredicateBuilder() 764 .createPartitionIdPredicate(myRequestPartitionId); 765 if (partitionIdPredicate != null) { 766 sqlBuilder.addPredicate(partitionIdPredicate); 767 } 768 } 769 770 // Add PID list predicate for full text search and/or lastn operation 771 addPidListPredicate(thePidList, sqlBuilder); 772 773 // Last updated 774 addLastUpdatePredicate(sqlBuilder); 775 776 /* 777 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 778 * to something needed to guarantee correct results. 779 * 780 * Why do we need it? Suppose for example, a query like: 781 * Observation?category=foo,bar,baz 782 * And suppose you have many resources that have all 3 of these category codes. In this case 783 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 784 * we may exhaust the query results without getting enough distinct results back. When that 785 * happens we re-run the query with a larger limit. Excluding results we already know about 786 * tries to ensure that we get new unique results. 787 * 788 * The challenge with that though is that lots of DBs have an issue with too many 789 * parameters in one query. So we only do this optimization if there aren't too 790 * many results. 791 */ 792 if (myHasNextIteratorQuery) { 793 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 794 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 795 } 796 } 797 798 /* 799 * If offset is present, we want to deduplicate the results by using GROUP BY; 800 * OR 801 * if the MaxResultsToFetch is null, we are requesting "everything", 802 * so we'll let the db do the deduplication (instead of in-memory) 803 */ 804 if (theSearchProperties.isDeduplicateInDatabase()) { 805 queryStack3.addGrouping(); 806 queryStack3.setUseAggregate(true); 807 } 808 809 /* 810 * Sort 811 * 812 * If we have a sort, we wrap the criteria search (the search that actually 813 * finds the appropriate resources) in an outer search which is then sorted 814 */ 815 if (theSearchProperties.hasSort()) { 816 assert !theSearchProperties.isDoCountOnlyFlag(); 817 818 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 819 } 820 821 /* 822 * Now perform the search 823 */ 824 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 825 } 826 827 private void executeSearch( 828 SearchQueryProperties theProperties, 829 List<ISearchQueryExecutor> theSearchQueryExecutors, 830 SearchQueryBuilder sqlBuilder) { 831 GeneratedSql generatedSql = 832 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 833 if (!generatedSql.isMatchNothing()) { 834 SearchQueryExecutor executor = 835 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 836 theSearchQueryExecutors.add(executor); 837 } 838 } 839 840 private void createChunkedQueryForEverythingSearch( 841 RequestDetails theRequest, 842 SearchParameterMap theParams, 843 SearchQueryProperties theSearchQueryProperties, 844 List<JpaPid> thePidList, 845 List<ISearchQueryExecutor> theSearchQueryExecutors) { 846 847 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 848 myContext, 849 myStorageSettings, 850 myPartitionSettings, 851 myRequestPartitionId, 852 null, 853 mySqlBuilderFactory, 854 myDialectProvider, 855 
theSearchQueryProperties.isDoCountOnlyFlag()); 856 857 QueryStack queryStack3 = new QueryStack( 858 theRequest, 859 theParams, 860 myStorageSettings, 861 myContext, 862 sqlBuilder, 863 mySearchParamRegistry, 864 myPartitionSettings); 865 866 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 867 868 Set<JpaPid> targetPids = new HashSet<>(); 869 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 870 871 extractTargetPidsFromIdParams(targetPids); 872 873 // add the target pids to our executors as the first 874 // results iterator to go through 875 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 876 } else { 877 // For Everything queries, we make the query root by the ResourceLink table, since this query 878 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 879 // the one problem with this approach is that it doesn't catch Patients that have absolutely 880 // nothing linked to them. So we do one additional query to make sure we catch those too. 881 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 882 myContext, 883 myStorageSettings, 884 myPartitionSettings, 885 myRequestPartitionId, 886 myResourceName, 887 mySqlBuilderFactory, 888 myDialectProvider, 889 theSearchQueryProperties.isDoCountOnlyFlag()); 890 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 891 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 892 String sql = allTargetsSql.getSql(); 893 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 894 895 List<JpaPid> output = 896 jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled())); 897 898 // we add a search executor to fetch unlinked patients first 899 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 900 } 901 902 List<String> typeSourceResources = new ArrayList<>(); 903 if (myParams.get(Constants.PARAM_TYPE) != null) { 904 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 905 } 906 907 queryStack3.addPredicateEverythingOperation( 908 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 909 910 // Add PID list predicate for full text search and/or lastn operation 911 addPidListPredicate(thePidList, sqlBuilder); 912 913 /* 914 * If offset is present, we want deduplicate the results by using GROUP BY 915 * ORDER BY is required to make sure we return unique results for each page 916 */ 917 if (theSearchQueryProperties.hasOffset()) { 918 queryStack3.addGrouping(); 919 queryStack3.addOrdering(); 920 queryStack3.setUseAggregate(true); 921 } 922 923 /* 924 * Now perform the search 925 */ 926 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 927 } 928 929 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 930 if (thePidList != null && !thePidList.isEmpty()) { 931 theSqlBuilder.addResourceIdsPredicate(thePidList); 932 } 933 } 934 935 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 936 DateRangeParam lu = myParams.getLastUpdated(); 937 if (lu != null && !lu.isEmpty()) { 938 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 939 theSqlBuilder.addPredicate(lastUpdatedPredicates); 940 } 941 } 942 943 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 944 JdbcTemplate jdbcTemplate = new 
JdbcTemplate(myEntityManagerFactory.getDataSource()); 945 jdbcTemplate.setFetchSize(myFetchSize); 946 if (theMaximumResults != null) { 947 jdbcTemplate.setMaxRows(theMaximumResults); 948 } 949 return jdbcTemplate; 950 } 951 952 private Collection<String> extractTypeSourceResourcesFromParams() { 953 954 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 955 956 // first off, let's flatten the list of list 957 List<IQueryParameterType> iQueryParameterTypesList = 958 listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); 959 960 // then, extract all elements of each CSV into one big list 961 List<String> resourceTypes = iQueryParameterTypesList.stream() 962 .map(param -> ((StringParam) param).getValue()) 963 .map(csvString -> List.of(csvString.split(","))) 964 .flatMap(List::stream) 965 .collect(Collectors.toList()); 966 967 Set<String> knownResourceTypes = myContext.getResourceTypes(); 968 969 // remove leading/trailing whitespaces if any and remove duplicates 970 Set<String> retVal = new HashSet<>(); 971 972 for (String type : resourceTypes) { 973 String trimmed = type.trim(); 974 if (!knownResourceTypes.contains(trimmed)) { 975 throw new ResourceNotFoundException( 976 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 977 } 978 retVal.add(trimmed); 979 } 980 981 return retVal; 982 } 983 984 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 985 return myStorageSettings.isIndexOnContainedResources() 986 && theParams.values().stream() 987 .flatMap(Collection::stream) 988 .flatMap(Collection::stream) 989 .anyMatch(ReferenceParam.class::isInstance); 990 } 991 992 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 993 if (theSort == null || isBlank(theSort.getParamName())) { 994 return; 995 } 996 997 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 998 999 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 1000 1001 theQueryStack.addSortOnResourceId(ascending); 1002 1003 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 1004 1005 theQueryStack.addSortOnResourcePID(ascending); 1006 1007 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1008 1009 theQueryStack.addSortOnLastUpdated(ascending); 1010 1011 } else { 1012 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1013 myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1014 1015 /* 1016 * If we have a sort like _sort=subject.name and we have an 1017 * uplifted refchain for that combination we can do it more efficiently 1018 * by using the index associated with the uplifted refchain. In this case, 1019 * we need to find the actual target search parameter (corresponding 1020 * to "name" in this example) so that we know what datatype it is. 
1021 */ 1022 String paramName = theSort.getParamName(); 1023 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1024 String[] chains = StringUtils.split(paramName, '.'); 1025 if (chains.length == 2) { 1026 1027 // Given: Encounter?_sort=Patient:subject.name 1028 String referenceParam = chains[0]; // subject 1029 String referenceParamTargetType = null; // Patient 1030 String targetParam = chains[1]; // name 1031 1032 int colonIdx = referenceParam.indexOf(':'); 1033 if (colonIdx > -1) { 1034 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1035 referenceParam = referenceParam.substring(colonIdx + 1); 1036 } 1037 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1038 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1039 if (outerParam == null) { 1040 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1041 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1042 for (String nextTargetType : outerParam.getTargets()) { 1043 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1044 continue; 1045 } 1046 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1047 nextTargetType, 1048 targetParam, 1049 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1050 if (innerParam != null) { 1051 param = innerParam; 1052 break; 1053 } 1054 } 1055 } 1056 } 1057 } 1058 1059 int colonIdx = paramName.indexOf(':'); 1060 String referenceTargetType = null; 1061 if (colonIdx > -1) { 1062 referenceTargetType = paramName.substring(0, colonIdx); 1063 paramName = paramName.substring(colonIdx + 1); 1064 } 1065 1066 int dotIdx = paramName.indexOf('.'); 1067 String chainName = null; 1068 if (param == null && dotIdx > -1) { 1069 chainName = paramName.substring(dotIdx + 1); 1070 paramName = paramName.substring(0, dotIdx); 1071 if (chainName.contains(".")) { 1072 String msg = myContext 1073 .getLocalizer() 1074 .getMessageSanitized( 1075 BaseStorageDao.class, 1076 "invalidSortParameterTooManyChains", 1077 paramName + "." 
+ chainName); 1078 throw new InvalidRequestException(Msg.code(2286) + msg); 1079 } 1080 } 1081 1082 if (param == null) { 1083 param = mySearchParamRegistry.getActiveSearchParam( 1084 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1085 } 1086 1087 if (param == null) { 1088 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1089 } 1090 1091 // param will never be null here (the above line throws if it does) 1092 // this is just to prevent the warning 1093 assert param != null; 1094 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1095 throw new InvalidRequestException( 1096 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1097 } 1098 1099 switch (param.getParamType()) { 1100 case STRING: 1101 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1102 break; 1103 case DATE: 1104 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1105 break; 1106 case REFERENCE: 1107 theQueryStack.addSortOnResourceLink( 1108 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1109 break; 1110 case TOKEN: 1111 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1112 break; 1113 case NUMBER: 1114 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1115 break; 1116 case URI: 1117 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1118 break; 1119 case QUANTITY: 1120 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1121 break; 1122 case COMPOSITE: 1123 List<RuntimeSearchParam> compositeList = 1124 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); 1125 if (compositeList == null) { 1126 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1127 + " is not defined by the resource " + myResourceName); 1128 } 1129 if (compositeList.size() != 2) { 1130 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1131 + " must have 2 composite types declared in parameter annotation, found " 1132 + compositeList.size()); 1133 } 1134 RuntimeSearchParam left = compositeList.get(0); 1135 RuntimeSearchParam right = compositeList.get(1); 1136 1137 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1138 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1139 1140 break; 1141 case SPECIAL: 1142 if (LOCATION_POSITION.equals(param.getPath())) { 1143 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1144 break; 1145 } 1146 throw new InvalidRequestException( 1147 Msg.code(2306) + "This server does not support _sort specifications of type " 1148 + param.getParamType() + " - Can't serve _sort=" + paramName); 1149 1150 case HAS: 1151 default: 1152 throw new InvalidRequestException( 1153 Msg.code(1197) + "This server does not support _sort specifications of type " 1154 + param.getParamType() + " - Can't serve _sort=" + paramName); 1155 } 1156 } 1157 1158 // Recurse 1159 createSort(theQueryStack, theSort.getChain(), theParams); 1160 } 1161 1162 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1163 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1164 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1165 String msg = myContext 
1166 .getLocalizer() 1167 .getMessageSanitized( 1168 BaseStorageDao.class, 1169 "invalidSortParameter", 1170 theParamName, 1171 theResourceName, 1172 validSearchParameterNames); 1173 throw new InvalidRequestException(Msg.code(1194) + msg); 1174 } 1175 1176 private void createCompositeSort( 1177 QueryStack theQueryStack, 1178 RestSearchParameterTypeEnum theParamType, 1179 String theParamName, 1180 boolean theAscending) { 1181 1182 switch (theParamType) { 1183 case STRING: 1184 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1185 break; 1186 case DATE: 1187 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1188 break; 1189 case TOKEN: 1190 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1191 break; 1192 case QUANTITY: 1193 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1194 break; 1195 case NUMBER: 1196 case REFERENCE: 1197 case COMPOSITE: 1198 case URI: 1199 case HAS: 1200 case SPECIAL: 1201 default: 1202 throw new InvalidRequestException( 1203 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1204 + " on _sort=" + theParamName); 1205 } 1206 } 1207 1208 private void doLoadPids( 1209 Collection<JpaPid> thePids, 1210 Collection<JpaPid> theIncludedPids, 1211 List<IBaseResource> theResourceListToPopulate, 1212 boolean theForHistoryOperation, 1213 Map<Long, Integer> thePosition) { 1214 1215 Map<JpaPid, Long> resourcePidToVersion = null; 1216 for (JpaPid next : thePids) { 1217 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1218 if (resourcePidToVersion == null) { 1219 resourcePidToVersion = new HashMap<>(); 1220 } 1221 resourcePidToVersion.put(next, next.getVersion()); 1222 } 1223 } 1224 1225 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1226 if (versionlessPids.size() < getMaximumPageSize()) { 1227 versionlessPids = normalizeIdListForInClause(versionlessPids); 1228 } 1229 1230 // Load the resource bodies 1231 List<ResourceHistoryTable> resourceSearchViewList = 1232 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable( 1233 JpaPidFk.fromPids(versionlessPids)); 1234 1235 /* 1236 * If we have specific versions to load, replace the history entries with the 1237 * correct ones 1238 * 1239 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1240 * version entity first, and by batching the fetches. But this is a fairly infrequently 1241 * used feature, and loading history entities by PK is a very efficient query so it's 1242 * not the end of the world 1243 */ 1244 if (resourcePidToVersion != null) { 1245 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1246 ResourceHistoryTable next = resourceSearchViewList.get(i); 1247 JpaPid resourceId = next.getPersistentId(); 1248 Long version = resourcePidToVersion.get(resourceId); 1249 resourceId.setVersion(version); 1250 if (version != null && !version.equals(next.getVersion())) { 1251 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1252 next.getResourceId().toFk(), version); 1253 resourceSearchViewList.set(i, replacement); 1254 } 1255 } 1256 } 1257 1258 // -- preload all tags with tag definition if any 1259 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1260 1261 for (ResourceHistoryTable next : resourceSearchViewList) { 1262 if (next.getDeleted() != null) { 1263 continue; 1264 } 1265 1266 Class<? 
extends IBaseResource> resourceType = 1267 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1268 1269 JpaPid resourceId = next.getPersistentId(); 1270 1271 if (resourcePidToVersion != null) { 1272 Long version = resourcePidToVersion.get(resourceId); 1273 resourceId.setVersion(version); 1274 } 1275 1276 IBaseResource resource; 1277 resource = myJpaStorageResourceParser.toResource( 1278 resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1279 if (resource == null) { 1280 ourLog.warn( 1281 "Unable to find resource {}/{}/_history/{} in database", 1282 next.getResourceType(), 1283 next.getIdDt().getIdPart(), 1284 next.getVersion()); 1285 continue; 1286 } 1287 1288 Integer index = thePosition.get(resourceId.getId()); 1289 if (index == null) { 1290 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1291 continue; 1292 } 1293 1294 if (theIncludedPids.contains(resourceId)) { 1295 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1296 } else { 1297 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1298 } 1299 1300 // ensure there's enough space; "<=" because of 0-indexing 1301 while (theResourceListToPopulate.size() <= index) { 1302 theResourceListToPopulate.add(null); 1303 } 1304 theResourceListToPopulate.set(index, resource); 1305 } 1306 } 1307 1308 private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) { 1309 return switch (myStorageSettings.getTagStorageMode()) { 1310 case VERSIONED -> getPidToTagMapVersioned(theHistoryTables); 1311 case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables); 1312 case INLINE -> Map.of(); 1313 }; 1314 } 1315 1316 @Nonnull 1317 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned( 1318 Collection<ResourceHistoryTable> theHistoryTables) { 1319 List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size()); 1320 1321 // -- find all resource has tags 1322 for (ResourceHistoryTable resource : theHistoryTables) { 1323 if (resource.isHasTags()) { 1324 idList.add(resource.getId()); 1325 } 1326 } 1327 1328 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1329 1330 // -- no tags 1331 if (idList.isEmpty()) { 1332 return tagMap; 1333 } 1334 1335 // -- get all tags for the idList 1336 Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList); 1337 1338 // -- build the map, key = resourceId, value = list of ResourceTag 1339 JpaPid resourceId; 1340 Collection<BaseTag> tagCol; 1341 for (ResourceHistoryTag tag : tagList) { 1342 1343 resourceId = tag.getResourcePid(); 1344 tagCol = tagMap.get(resourceId); 1345 if (tagCol == null) { 1346 tagCol = new ArrayList<>(); 1347 tagCol.add(tag); 1348 tagMap.put(resourceId, tagCol); 1349 } else { 1350 tagCol.add(tag); 1351 } 1352 } 1353 1354 return tagMap; 1355 } 1356 1357 @Nonnull 1358 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned( 1359 Collection<ResourceHistoryTable> theHistoryTables) { 1360 List<JpaPid> idList = new ArrayList<>(theHistoryTables.size()); 1361 1362 // -- find all resource has tags 1363 for (ResourceHistoryTable resource : theHistoryTables) { 1364 if (resource.isHasTags()) { 1365 idList.add(resource.getResourceId()); 1366 } 1367 } 1368 1369 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1370 1371 // -- no tags 1372 if (idList.isEmpty()) { 1373 return tagMap; 1374 } 1375 1376 // -- get all tags for the idList 
1377 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList); 1378 1379 // -- build the map, key = resourceId, value = list of ResourceTag 1380 JpaPid resourceId; 1381 Collection<BaseTag> tagCol; 1382 for (ResourceTag tag : tagList) { 1383 1384 resourceId = tag.getResourceId(); 1385 tagCol = tagMap.get(resourceId); 1386 if (tagCol == null) { 1387 tagCol = new ArrayList<>(); 1388 tagCol.add(tag); 1389 tagMap.put(resourceId, tagCol); 1390 } else { 1391 tagCol.add(tag); 1392 } 1393 } 1394 1395 return tagMap; 1396 } 1397 1398 @Override 1399 public void loadResourcesByPid( 1400 Collection<JpaPid> thePids, 1401 Collection<JpaPid> theIncludedPids, 1402 List<IBaseResource> theResourceListToPopulate, 1403 boolean theForHistoryOperation, 1404 RequestDetails theDetails) { 1405 if (thePids.isEmpty()) { 1406 ourLog.debug("The include pids are empty"); 1407 } 1408 1409 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1410 // when running asserts 1411 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1412 1413 Map<Long, Integer> position = new HashMap<>(); 1414 int index = 0; 1415 for (JpaPid next : thePids) { 1416 position.put(next.getId(), index++); 1417 } 1418 1419 // Can we fast track this loading by checking elastic search? 1420 boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids); 1421 if (isUsingElasticSearch) { 1422 try { 1423 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1424 return; 1425 1426 } catch (ResourceNotFoundInIndexException theE) { 1427 // some resources were not found in index, so we will inform this and resort to JPA search 1428 ourLog.warn( 1429 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1430 } 1431 } 1432 1433 // We only chunk because some jdbc drivers can't handle long param lists. 1434 QueryChunker.chunk( 1435 thePids, 1436 t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position)); 1437 } 1438 1439 /** 1440 * Check if we can load the resources from Hibernate Search instead of the database. 1441 * We assume this is faster. 1442 * <p> 1443 * Hibernate Search only stores the current version, and only if enabled. 1444 * 1445 * @param thePids the pids to check for versioned references 1446 * @return can we fetch from Hibernate Search? 1447 */ 1448 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1449 // is storage enabled? 1450 return myStorageSettings.isStoreResourceInHSearchIndex() 1451 && myStorageSettings.isHibernateSearchIndexSearchParams() 1452 && 1453 // we don't support history 1454 thePids.stream().noneMatch(p -> p.getVersion() != null) 1455 && 1456 // skip the complexity for metadata in dstu2 1457 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1458 } 1459 1460 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1461 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1462 // only impl 1463 // to handle lastN? 
1464 if (myStorageSettings.isHibernateSearchIndexSearchParams() 1465 && myStorageSettings.isStoreResourceInHSearchIndex()) { 1466 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1467 1468 return myFulltextSearchSvc.getResources(pidList); 1469 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1470 // legacy LastN implementation 1471 return myIElasticsearchSvc.getObservationResources(thePids); 1472 } else { 1473 return Collections.emptyList(); 1474 } 1475 } 1476 1477 /** 1478 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1479 * so it can't be Collections.emptySet() or some such thing. 1480 * The JpaPid returned will have resource type populated. 1481 */ 1482 @Override 1483 public Set<JpaPid> loadIncludes( 1484 FhirContext theContext, 1485 EntityManager theEntityManager, 1486 Collection<JpaPid> theMatches, 1487 Collection<Include> theIncludes, 1488 boolean theReverseMode, 1489 DateRangeParam theLastUpdated, 1490 String theSearchIdOrDescription, 1491 RequestDetails theRequest, 1492 Integer theMaxCount) { 1493 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1494 parameters.setFhirContext(theContext); 1495 parameters.setEntityManager(theEntityManager); 1496 parameters.setMatches(theMatches); 1497 parameters.setIncludeFilters(theIncludes); 1498 parameters.setReverseMode(theReverseMode); 1499 parameters.setLastUpdated(theLastUpdated); 1500 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1501 parameters.setRequestDetails(theRequest); 1502 parameters.setMaxCount(theMaxCount); 1503 return loadIncludes(parameters); 1504 } 1505 1506 @Override 1507 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1508 Collection<JpaPid> matches = theParameters.getMatches(); 1509 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1510 boolean reverseMode = theParameters.isReverseMode(); 1511 EntityManager entityManager = theParameters.getEntityManager(); 1512 Integer maxCount = theParameters.getMaxCount(); 1513 FhirContext fhirContext = theParameters.getFhirContext(); 1514 RequestDetails request = theParameters.getRequestDetails(); 1515 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1516 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1517 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1518 IInterceptorBroadcaster compositeBroadcaster = 1519 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1520 1521 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1522 CurrentThreadCaptureQueriesListener.startCapturing(); 1523 } 1524 if (matches.isEmpty()) { 1525 return new HashSet<>(); 1526 } 1527 if (currentIncludes == null || currentIncludes.isEmpty()) { 1528 return new HashSet<>(); 1529 } 1530 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1531 String searchPartitionIdFieldName = 1532 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1533 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1534 String findPartitionIdFieldName = 1535 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1536 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1537 String findVersionFieldName = null; 1538 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1539 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1540 } 1541 1542 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1543 HashSet<JpaPid> allAdded = new HashSet<>(); 1544 HashSet<JpaPid> original = new HashSet<>(matches); 1545 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1546 1547 int roundCounts = 0; 1548 StopWatch w = new StopWatch(); 1549 1550 boolean addedSomeThisRound; 1551 do { 1552 roundCounts++; 1553 1554 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1555 1556 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1557 Include nextInclude = iter.next(); 1558 if (!nextInclude.isRecurse()) { 1559 iter.remove(); 1560 } 1561 1562 // Account for _include=* 1563 boolean matchAll = "*".equals(nextInclude.getValue()); 1564 1565 // Account for _include=[resourceType]:* 1566 String wantResourceType = null; 1567 if (!matchAll) { 1568 if ("*".equals(nextInclude.getParamName())) { 1569 wantResourceType = nextInclude.getParamType(); 1570 matchAll = true; 1571 } 1572 } 1573 1574 if (matchAll) { 1575 loadIncludesMatchAll( 1576 findPidFieldName, 1577 findPartitionIdFieldName, 1578 findResourceTypeFieldName, 1579 findVersionFieldName, 1580 searchPidFieldName, 1581 searchPartitionIdFieldName, 1582 wantResourceType, 1583 reverseMode, 1584 hasDesiredResourceTypes, 1585 nextRoundMatches, 1586 entityManager, 1587 maxCount, 1588 desiredResourceTypes, 1589 pidsToInclude, 1590 request); 1591 } else { 1592 loadIncludesMatchSpecific( 1593 nextInclude, 1594 fhirContext, 1595 findPidFieldName, 1596 findPartitionIdFieldName, 1597 findVersionFieldName, 1598 searchPidFieldName, 1599 reverseMode, 1600 nextRoundMatches, 1601 entityManager, 1602 maxCount, 1603 pidsToInclude, 1604 request); 1605 } 1606 } 1607 1608 nextRoundMatches.clear(); 1609 for (JpaPid next : pidsToInclude) { 1610 if (!original.contains(next) && !allAdded.contains(next)) { 1611 nextRoundMatches.add(next); 1612 } else { 1613 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1614 } 1615 } 1616 1617 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1618 1619 if (maxCount != null && allAdded.size() >= maxCount) { 1620 break; 1621 } 1622 1623 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1624 1625 allAdded.removeAll(original); 1626 1627 ourLog.info( 1628 "Loaded {} {} in {} rounds and {} ms for search {}", 1629 allAdded.size(), 1630 reverseMode ? 
"_revincludes" : "_includes", 1631 roundCounts, 1632 w.getMillisAndRestart(), 1633 searchIdOrDescription); 1634 1635 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1636 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1637 } 1638 1639 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1640 // This can be used to remove results from the search result details before 1641 // the user has a chance to know that they were in the results 1642 if (!allAdded.isEmpty()) { 1643 1644 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1645 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1646 JpaPreResourceAccessDetails accessDetails = 1647 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1648 HookParams params = new HookParams() 1649 .add(IPreResourceAccessDetails.class, accessDetails) 1650 .add(RequestDetails.class, request) 1651 .addIfMatchesType(ServletRequestDetails.class, request); 1652 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1653 1654 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1655 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1656 JpaPid value = includedPidList.remove(i); 1657 if (value != null) { 1658 allAdded.remove(value); 1659 } 1660 } 1661 } 1662 } 1663 } 1664 1665 return allAdded; 1666 } 1667 1668 private void loadIncludesMatchSpecific( 1669 Include nextInclude, 1670 FhirContext fhirContext, 1671 String findPidFieldName, 1672 String findPartitionFieldName, 1673 String findVersionFieldName, 1674 String searchPidFieldName, 1675 boolean reverseMode, 1676 List<JpaPid> nextRoundMatches, 1677 EntityManager entityManager, 1678 Integer maxCount, 1679 HashSet<JpaPid> pidsToInclude, 1680 RequestDetails theRequest) { 1681 List<String> paths; 1682 1683 // Start replace 1684 RuntimeSearchParam param; 1685 String resType = nextInclude.getParamType(); 1686 if (isBlank(resType)) { 1687 return; 1688 } 1689 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1690 if (def == null) { 1691 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1692 return; 1693 } 1694 1695 String paramName = nextInclude.getParamName(); 1696 if (isNotBlank(paramName)) { 1697 param = mySearchParamRegistry.getActiveSearchParam( 1698 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1699 } else { 1700 param = null; 1701 } 1702 if (param == null) { 1703 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1704 return; 1705 } 1706 1707 paths = param.getPathsSplitForResourceType(resType); 1708 // end replace 1709 1710 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1711 1712 for (String nextPath : paths) { 1713 String findPidFieldSqlColumn = 1714 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1715 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1716 if (findVersionFieldName != null) { 1717 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1718 } 1719 if (myPartitionSettings.isDatabasePartitionMode()) { 1720 fieldsToLoad += ", r."; 1721 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1722 ? 
"partition_id" 1723 : "target_res_partition_id"; 1724 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1725 } 1726 1727 // Query for includes lookup has 2 cases 1728 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1729 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1730 // url in target_resource_url 1731 1732 // Case 1: 1733 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1734 1735 String searchPidFieldSqlColumn = 1736 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1737 StringBuilder localReferenceQuery = new StringBuilder(); 1738 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1739 localReferenceQuery.append(" FROM hfj_res_link r "); 1740 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1741 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1742 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1743 } 1744 localReferenceQuery 1745 .append(" AND r.") 1746 .append(searchPidFieldSqlColumn) 1747 .append(" IN (:target_pids) "); 1748 if (myPartitionSettings.isDatabasePartitionMode()) { 1749 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1750 ? "target_res_partition_id" 1751 : "partition_id"; 1752 localReferenceQuery 1753 .append("AND r.") 1754 .append(partitionFieldToSearch) 1755 .append(" = :search_partition_id "); 1756 } 1757 localReferenceQueryParams.put("src_path", nextPath); 1758 // we loop over target_pids later. 1759 if (targetResourceTypes != null) { 1760 if (targetResourceTypes.size() == 1) { 1761 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1762 localReferenceQueryParams.put( 1763 "target_resource_type", 1764 targetResourceTypes.iterator().next()); 1765 } else { 1766 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1767 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1768 } 1769 } 1770 1771 // Case 2: 1772 Pair<String, Map<String, Object>> canonicalQuery = 1773 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest, param); 1774 1775 String sql = localReferenceQuery.toString(); 1776 if (canonicalQuery != null) { 1777 sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1778 } 1779 1780 Map<String, Object> limitParams = new HashMap<>(); 1781 if (maxCount != null) { 1782 LinkedList<Object> bindVariables = new LinkedList<>(); 1783 sql = SearchQueryBuilder.applyLimitToSql( 1784 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1785 1786 // The dialect SQL limiter uses positional params, but we're using 1787 // named params here, so we need to replace the positional params 1788 // with equivalent named ones 1789 StringBuilder sb = new StringBuilder(); 1790 for (int i = 0; i < sql.length(); i++) { 1791 char nextChar = sql.charAt(i); 1792 if (nextChar == '?') { 1793 String nextName = "limit" + i; 1794 sb.append(':').append(nextName); 1795 limitParams.put(nextName, bindVariables.removeFirst()); 1796 } else { 1797 sb.append(nextChar); 1798 } 1799 } 1800 sql = sb.toString(); 1801 } 1802 1803 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1804 for (Collection<JpaPid> nextPartition : partitions) { 1805 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1806 q.setParameter("target_pids", 
JpaPid.toLongList(nextPartition)); 1807 if (myPartitionSettings.isDatabasePartitionMode()) { 1808 q.setParameter( 1809 "search_partition_id", 1810 nextPartition.iterator().next().getPartitionId()); 1811 } 1812 localReferenceQueryParams.forEach(q::setParameter); 1813 if (canonicalQuery != null) { 1814 canonicalQuery.getRight().forEach(q::setParameter); 1815 } 1816 limitParams.forEach(q::setParameter); 1817 1818 try (ScrollableResultsIterator<Tuple> iter = new ScrollableResultsIterator<>(toScrollableResults(q))) { 1819 Tuple result; 1820 while (iter.hasNext()) { 1821 result = iter.next(); 1822 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1823 Long resourceVersion = null; 1824 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1825 resourceVersion = 1826 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1827 } 1828 Integer partitionId = null; 1829 if (myPartitionSettings.isDatabasePartitionMode()) { 1830 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1831 } 1832 1833 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1834 pid.setPartitionId(partitionId); 1835 pidsToInclude.add(pid); 1836 } 1837 } 1838 // myEntityManager.clear(); 1839 } 1840 } 1841 } 1842 1843 private void loadIncludesMatchAll( 1844 String findPidFieldName, 1845 String findPartitionFieldName, 1846 String findResourceTypeFieldName, 1847 String findVersionFieldName, 1848 String searchPidFieldName, 1849 String searchPartitionFieldName, 1850 String wantResourceType, 1851 boolean reverseMode, 1852 boolean hasDesiredResourceTypes, 1853 List<JpaPid> nextRoundMatches, 1854 EntityManager entityManager, 1855 Integer maxCount, 1856 List<String> desiredResourceTypes, 1857 HashSet<JpaPid> pidsToInclude, 1858 RequestDetails request) { 1859 1860 record IncludesRecord( 1861 Long resourceId, String resourceType, String resourceCanonicalUrl, Long version, Integer partitionId) {} 1862 1863 CriteriaBuilder cb = entityManager.getCriteriaBuilder(); 1864 CriteriaQuery<IncludesRecord> query = cb.createQuery(IncludesRecord.class); 1865 Root<ResourceLink> root = query.from(ResourceLink.class); 1866 1867 List<Selection<?>> selectionList = new ArrayList<>(); 1868 selectionList.add(root.get(findPidFieldName)); 1869 selectionList.add(root.get(findResourceTypeFieldName)); 1870 selectionList.add(root.get("myTargetResourceUrl")); 1871 if (findVersionFieldName != null) { 1872 selectionList.add(root.get(findVersionFieldName)); 1873 } else { 1874 selectionList.add(cb.nullLiteral(Long.class)); 1875 } 1876 if (myPartitionSettings.isDatabasePartitionMode()) { 1877 selectionList.add(root.get(findPartitionFieldName)); 1878 } else { 1879 selectionList.add(cb.nullLiteral(Integer.class)); 1880 } 1881 query.multiselect(selectionList); 1882 1883 List<Predicate> predicates = new ArrayList<>(); 1884 1885 if (myPartitionSettings.isDatabasePartitionMode()) { 1886 predicates.add( 1887 cb.equal(root.get(searchPartitionFieldName), cb.parameter(Integer.class, "target_partition_id"))); 1888 } 1889 1890 predicates.add(root.get(searchPidFieldName).in(cb.parameter(List.class, "target_pids"))); 1891 1892 /* 1893 * We need to set the resource type in 2 cases only: 1894 * 1) we are in $everything mode 1895 * (where we only want to fetch specific resource types, regardless of what is 1896 * available to fetch) 1897 * 2) we are doing revincludes 1898 * 1899 * Technically if the request is a qualified star (e.g. 
_include=Observation:*) we 1900 * should always be checking the source resource type on the resource link. We don't 1901 * actually index that column though by default, so in order to try and be efficient 1902 * we don't actually include it for includes (but we do for revincludes). This is 1903 * because for an include, it doesn't really make sense to include a different 1904 * resource type than the one you are searching on. 1905 */ 1906 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1907 // because mySourceResourceType is not part of the HFJ_RES_LINK 1908 // index, this might not be the most optimal performance. 1909 // but it is for an $everything operation (and maybe we should update the index) 1910 predicates.add( 1911 cb.equal(root.get("mySourceResourceType"), cb.parameter(String.class, "want_resource_type"))); 1912 } else { 1913 wantResourceType = null; 1914 } 1915 1916 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1917 // resources 1918 // (e.g. via Provenance, List, or Group) when in an $everything operation 1919 if (myParams != null 1920 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1921 predicates.add(cb.notEqual(root.get("myTargetResourceType"), "Patient")); 1922 predicates.add(cb.not(root.get("mySourceResourceType") 1923 .in(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE))); 1924 } 1925 1926 if (hasDesiredResourceTypes) { 1927 predicates.add( 1928 root.get("myTargetResourceType").in(cb.parameter(List.class, "desired_target_resource_types"))); 1929 } 1930 1931 query.where(cb.and(predicates.toArray(new Predicate[0]))); 1932 1933 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1934 for (Collection<JpaPid> nextPartition : partitions) { 1935 1936 TypedQuery<IncludesRecord> q = myEntityManager.createQuery(query); 1937 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1938 if (myPartitionSettings.isDatabasePartitionMode()) { 1939 q.setParameter( 1940 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1941 } 1942 if (wantResourceType != null) { 1943 q.setParameter("want_resource_type", wantResourceType); 1944 } 1945 if (maxCount != null) { 1946 q.setMaxResults(maxCount); 1947 } 1948 if (hasDesiredResourceTypes) { 1949 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1950 } 1951 1952 Set<String> canonicalUrls = null; 1953 1954 try (ScrollableResultsIterator<IncludesRecord> iter = 1955 new ScrollableResultsIterator<>(toScrollableResults(q))) { 1956 IncludesRecord nextRow; 1957 while (iter.hasNext()) { 1958 nextRow = iter.next(); 1959 if (nextRow == null) { 1960 // This can happen if there are outgoing references which are canonical or point to 1961 // other servers 1962 continue; 1963 } 1964 1965 Long version = nextRow.version; 1966 Long resourceId = nextRow.resourceId; 1967 String resourceType = nextRow.resourceType; 1968 String resourceCanonicalUrl = nextRow.resourceCanonicalUrl; 1969 Integer partitionId = nextRow.partitionId; 1970 1971 if (resourceId != null) { 1972 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1973 pid.setPartitionId(partitionId); 1974 pidsToInclude.add(pid); 1975 } else if (resourceCanonicalUrl != null) { 1976 if (canonicalUrls == null) { 1977 canonicalUrls = new HashSet<>(); 1978 } 1979 canonicalUrls.add(resourceCanonicalUrl); 1980 } 
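// Illustrative examples of the two row shapes handled above (hypothetical values):
//   resourceId=123, resourceType="Observation", resourceCanonicalUrl=null
//       -> turned into a JpaPid and added to pidsToInclude directly
//   resourceId=null, resourceCanonicalUrl="http://example.org/fhir/Questionnaire/q1"
//       -> collected into canonicalUrls and resolved by loadCanonicalUrls() below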
1981 }
1982 }
1983
1984 if (canonicalUrls != null) {
1985 loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
1986 }
1987 }
1988 }
1989
1990 private void loadCanonicalUrls(
1991 RequestDetails theRequestDetails,
1992 Set<String> theCanonicalUrls,
1993 EntityManager theEntityManager,
1994 HashSet<JpaPid> thePidsToInclude,
1995 boolean theReverse) {
1996 StringBuilder sqlBuilder;
1997 CanonicalUrlTargets canonicalUrlTargets =
1998 calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
1999 if (canonicalUrlTargets.isEmpty()) {
2000 return;
2001 }
2002
2003 String message =
2004 "Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
2005 firePerformanceWarning(theRequestDetails, message);
2006
2007 List<List<String>> canonicalUrlPartitions = ListUtils.partition(
2008 List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.hashIdentityValues.size());
2009
2010 sqlBuilder = new StringBuilder();
2011 sqlBuilder.append("SELECT ");
2012 if (myPartitionSettings.isPartitioningEnabled()) {
2013 sqlBuilder.append("i.myPartitionIdValue, ");
2014 }
2015 sqlBuilder.append("i.myResourcePid ");
2016
2017 sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
2018 sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
2019 sqlBuilder.append("AND i.myUri IN (:uris)");
2020
2021 String canonicalResSql = sqlBuilder.toString();
2022
2023 for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
2024 TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
2025 canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.hashIdentityValues);
2026 canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
2027 List<Object[]> results = canonicalResIdQuery.getResultList();
2028 for (var next : results) {
2029 if (next != null) {
2030 Integer partitionId = null;
2031 Long pid;
2032 if (next.length == 1) {
2033 pid = (Long) next[0];
2034 } else {
2035 partitionId = (Integer) ((Object[]) next)[0];
2036 pid = (Long) ((Object[]) next)[1];
2037 }
2038 if (pid != null) {
2039 thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
2040 }
2041 }
2042 }
2043 }
2044 }
2045
2046 /**
2047 * Calls the raw SQL performance trace hook: sends the SQL queries captured on the current
2048 * thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut.
2049 *
2050 * @param request the request details
2051 */
2052 private void callRawSqlHookWithCurrentThreadQueries(
2053 RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
2054 SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
2055 HookParams params = new HookParams()
2056 .add(RequestDetails.class, request)
2057 .addIfMatchesType(ServletRequestDetails.class, request)
2058 .add(SqlQueryList.class, capturedQueries);
2059 theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
2060 }
2061
2062 @Nullable
2063 private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
2064 String targetResourceType = nextInclude.getParamTargetType();
2065 boolean haveTargetTypesDefinedByParam = param.hasTargets();
2066 Set<String> targetResourceTypes;
2067 if (targetResourceType != null) {
2068 targetResourceTypes = Set.of(targetResourceType);
2069 } else if (haveTargetTypesDefinedByParam) {
2070 targetResourceTypes = param.getTargets();
2071 } else {
2072 // all types!
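// Illustrative examples of the three cases (hypothetical include values):
//   _include=Observation:subject:Patient             -> {"Patient"} (the explicit :targetType qualifier wins)
//   _include=Observation:subject (param has targets)  -> param.getTargets(), e.g. {"Patient", "Group", "Device"}
//   no qualifier and no declared targets              -> null, i.e. any target type is acceptable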
2073 targetResourceTypes = null; 2074 } 2075 return targetResourceTypes; 2076 } 2077 2078 @Nullable 2079 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2080 String theVersionFieldName, 2081 Set<String> theTargetResourceTypes, 2082 boolean theReverse, 2083 RequestDetails theRequest, 2084 RuntimeSearchParam theParam) { 2085 2086 String[] searchParameterPaths = SearchParameterUtil.splitSearchParameterExpressions(theParam.getPath()); 2087 2088 // If we know for sure that none of the paths involved in this SearchParameter could 2089 // be indexing a canonical 2090 if (Arrays.stream(searchParameterPaths) 2091 .noneMatch(t -> SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2092 myContext, myResourceName, t, theReverse))) { 2093 return null; 2094 } 2095 2096 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2097 if (theVersionFieldName != null) { 2098 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2099 fieldsToLoadFromSpidxUriTable += ", NULL"; 2100 } 2101 2102 if (myPartitionSettings.isDatabasePartitionMode()) { 2103 if (theReverse) { 2104 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2105 } else { 2106 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2107 } 2108 } 2109 2110 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2111 // But sp_name isn't indexed, so we use hash_identity instead. 2112 CanonicalUrlTargets canonicalUrlTargets = 2113 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2114 if (canonicalUrlTargets.isEmpty()) { 2115 return null; 2116 } 2117 2118 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2119 StringBuilder canonicalUrlQuery = new StringBuilder(); 2120 canonicalUrlQuery 2121 .append("SELECT ") 2122 .append(fieldsToLoadFromSpidxUriTable) 2123 .append(' '); 2124 canonicalUrlQuery.append("FROM hfj_res_link r "); 2125 2126 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2127 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2128 if (myPartitionSettings.isDatabasePartitionMode()) { 2129 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2130 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.partitionIds); 2131 } 2132 if (canonicalUrlTargets.hashIdentityValues.size() == 1) { 2133 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2134 canonicalUriQueryParams.put( 2135 "uri_identity_hash", 2136 canonicalUrlTargets.hashIdentityValues.iterator().next()); 2137 } else { 2138 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2139 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.hashIdentityValues); 2140 } 2141 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2142 canonicalUrlQuery.append(")"); 2143 2144 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2145 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2146 canonicalUrlQuery.append(" AND"); 2147 if (myPartitionSettings.isDatabasePartitionMode()) { 2148 if (theReverse) { 2149 canonicalUrlQuery.append(" rUri.partition_id"); 2150 } else { 2151 canonicalUrlQuery.append(" r.partition_id"); 2152 } 2153 canonicalUrlQuery.append(" = :search_partition_id"); 2154 canonicalUrlQuery.append(" AND"); 2155 } 2156 if (theReverse) { 2157 
canonicalUrlQuery.append(" rUri.res_id"); 2158 } else { 2159 canonicalUrlQuery.append(" r.src_resource_id"); 2160 } 2161 canonicalUrlQuery.append(" IN (:target_pids)"); 2162 2163 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2164 } 2165 2166 @Nonnull 2167 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2168 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2169 Set<String> targetResourceTypes = theTargetResourceTypes; 2170 if (targetResourceTypes == null) { 2171 /* 2172 * If we don't have a list of valid target types, we need to figure out a list of all 2173 * possible target types in order to perform the search of the URI index table. This is 2174 * because the hash_identity column encodes the resource type, so we'll need a hash 2175 * value for each possible target type. 2176 */ 2177 targetResourceTypes = new HashSet<>(); 2178 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2179 if (theReverse) { 2180 // For reverse includes, it is really hard to figure out what types 2181 // are actually potentially pointing to the type we're searching for 2182 // in this context, so let's just assume it could be anything. 2183 targetResourceTypes = possibleTypes; 2184 } else { 2185 List<RuntimeSearchParam> params = mySearchParamRegistry 2186 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2187 .values() 2188 .stream() 2189 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2190 .toList(); 2191 for (var next : params) { 2192 2193 String paths = next.getPath(); 2194 for (String path : SearchParameterUtil.splitSearchParameterExpressions(paths)) { 2195 2196 if (!SearchParameterUtil.referencePathCouldPotentiallyReferenceCanonicalElement( 2197 myContext, myResourceName, path, theReverse)) { 2198 continue; 2199 } 2200 2201 if (!next.getTargets().isEmpty()) { 2202 // For each reference parameter on the resource type we're searching for, 2203 // add all the potential target types to the list of possible target 2204 // resource types we can look up. 
2205 for (var nextTarget : next.getTargets()) { 2206 if (possibleTypes.contains(nextTarget)) { 2207 targetResourceTypes.add(nextTarget); 2208 } 2209 } 2210 } else { 2211 // If we have any references that don't define any target types, then 2212 // we need to assume that all enabled resource types are possible target 2213 // types 2214 targetResourceTypes.addAll(possibleTypes); 2215 break; 2216 } 2217 } 2218 } 2219 } 2220 } 2221 2222 if (targetResourceTypes.isEmpty()) { 2223 return new CanonicalUrlTargets(Set.of(), Set.of()); 2224 } 2225 2226 Set<Long> hashIdentityValues = new HashSet<>(); 2227 Set<Integer> partitionIds = new HashSet<>(); 2228 for (String type : targetResourceTypes) { 2229 2230 RequestPartitionId readPartition; 2231 if (myPartitionSettings.isPartitioningEnabled()) { 2232 readPartition = 2233 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2234 } else { 2235 readPartition = RequestPartitionId.defaultPartition(); 2236 } 2237 if (readPartition.hasPartitionIds()) { 2238 partitionIds.addAll(readPartition.getPartitionIds()); 2239 } 2240 2241 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2242 myPartitionSettings, readPartition, type, "url"); 2243 hashIdentityValues.add(hashIdentity); 2244 } 2245 2246 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2247 } 2248 2249 record CanonicalUrlTargets(@Nonnull Set<Long> hashIdentityValues, @Nonnull Set<Integer> partitionIds) { 2250 public boolean isEmpty() { 2251 return hashIdentityValues.isEmpty(); 2252 } 2253 } 2254 2255 /** 2256 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2257 * those pids where: 2258 * <ul> 2259 * <li>No single list is more than {@literal theMaxLoad} entries</li> 2260 * <li>Each list only contains JpaPids with the same partition ID</li> 2261 * </ul> 2262 */ 2263 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2264 2265 if (theNextRoundMatches.size() <= theMaxLoad) { 2266 boolean allSamePartition = true; 2267 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2268 if (!Objects.equals( 2269 theNextRoundMatches.get(i - 1).getPartitionId(), 2270 theNextRoundMatches.get(i).getPartitionId())) { 2271 allSamePartition = false; 2272 break; 2273 } 2274 } 2275 if (allSamePartition) { 2276 return Collections.singletonList(theNextRoundMatches); 2277 } 2278 } 2279 2280 // Break into partitioned sublists 2281 ListMultimap<String, JpaPid> lists = 2282 MultimapBuilder.hashKeys().arrayListValues().build(); 2283 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2284 String partitionId = nextRoundMatch.getPartitionId() != null 2285 ? 
nextRoundMatch.getPartitionId().toString()
2286 : "";
2287 lists.put(partitionId, nextRoundMatch);
2288 }
2289
2290 List<Collection<JpaPid>> retVal = new ArrayList<>();
2291 for (String key : lists.keySet()) {
2292 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
2293 retVal.addAll(nextPartition);
2294 }
2295
2296 // In unit test mode, we sort the results just for unit test predictability
2297 if (HapiSystemProperties.isUnitTestModeEnabled()) {
2298 retVal = retVal.stream()
2299 .map(t -> t.stream().sorted().collect(Collectors.toList()))
2300 .collect(Collectors.toList());
2301 }
2302
2303 return retVal;
2304 }
2305
2306 private void attemptComboUniqueSpProcessing(
2307 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
2308 RuntimeSearchParam comboParam = null;
2309 List<String> comboParamNames = null;
2310 List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
2311 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
2312 if (!exactMatchParams.isEmpty()) {
2313 comboParam = exactMatchParams.get(0);
2314 comboParamNames = new ArrayList<>(theParams.keySet());
2315 }
2316
2317 if (comboParam == null) {
2318 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
2319 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
2320 for (RuntimeSearchParam nextCandidate : candidateComboParams) {
2321 List<String> nextCandidateParamNames =
2322 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
2323 .map(RuntimeSearchParam::getName)
2324 .collect(Collectors.toList());
2325 if (theParams.keySet().containsAll(nextCandidateParamNames)) {
2326 comboParam = nextCandidate;
2327 comboParamNames = nextCandidateParamNames;
2328 break;
2329 }
2330 }
2331 }
2332
2333 if (comboParam != null) {
2334 Collections.sort(comboParamNames);
2335
2336 // Since we're going to remove elements from these lists below, make sure they are writable
2337 theParams.values().forEach(this::ensureSubListsAreWritable);
2338
2339 /*
2340 * Apply search against the combo param index in a loop:
2341 *
2342 * 1. First we check whether the actual parameter values in the
2343 * parameter map are actually usable for searching against the combo
2344 * param index. E.g. no search modifiers, date comparators, etc.,
2345 * since these mean you can't use the combo index.
2346 *
2347 * 2. Apply the values and create the join SQL. We remove parameter values from
2348 * the map as we apply them, so any parameter values remaining in the
2349 * map after each loop haven't yet been factored into the SQL.
2350 *
2351 * The loop allows us to create multiple combo index joins if there
2352 * are multiple AND expressions for the related parameters.
2353 */ 2354 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2355 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2356 } 2357 } 2358 } 2359 2360 private void applyComboSearchParam( 2361 QueryStack theQueryStack, 2362 @Nonnull SearchParameterMap theParams, 2363 RequestDetails theRequest, 2364 List<String> theComboParamNames, 2365 RuntimeSearchParam theComboParam) { 2366 2367 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2368 for (String nextParamName : theComboParamNames) { 2369 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2370 inputs.add(nextValues); 2371 } 2372 2373 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2374 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2375 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2376 2377 StringBuilder searchStringBuilder = new StringBuilder(); 2378 searchStringBuilder.append(myResourceName); 2379 searchStringBuilder.append("?"); 2380 2381 boolean first = true; 2382 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2383 2384 String nextParamName = theComboParamNames.get(paramIndex); 2385 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2386 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2387 // As a result, we strip the prefix if present. 2388 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2389 2390 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2391 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2392 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2393 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2394 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2395 } 2396 } 2397 2398 if (first) { 2399 first = false; 2400 } else { 2401 searchStringBuilder.append('&'); 2402 } 2403 2404 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2405 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2406 2407 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2408 } 2409 2410 String indexString = searchStringBuilder.toString(); 2411 ourLog.debug( 2412 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2413 2414 indexStrings.add(indexString); 2415 } 2416 2417 // Just to make sure we're stable for tests 2418 indexStrings.sort(Comparator.naturalOrder()); 2419 2420 // Interceptor broadcast: JPA_PERFTRACE_INFO 2421 IInterceptorBroadcaster compositeBroadcaster = 2422 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2423 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2424 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2425 StorageProcessingMessage msg = new StorageProcessingMessage() 2426 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2427 + indexStringForLog); 2428 HookParams params = new HookParams() 2429 .add(RequestDetails.class, theRequest) 2430 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2431 .add(StorageProcessingMessage.class, msg); 2432 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2433 } 2434 2435 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2436 case UNIQUE: 2437 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2438 break; 2439 case NON_UNIQUE: 2440 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2441 break; 2442 } 2443 2444 // Remove any empty parameters remaining after this 2445 theParams.clean(); 2446 } 2447 2448 /** 2449 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2450 * searching against a combo param with the given parameter names. This might be {@literal false} if 2451 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2452 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2453 */ 2454 private boolean validateParamValuesAreValidForComboParam( 2455 RequestDetails theRequest, 2456 @Nonnull SearchParameterMap theParams, 2457 List<String> theComboParamNames, 2458 RuntimeSearchParam theComboParam) { 2459 boolean paramValuesAreValidForCombo = true; 2460 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2461 2462 for (String nextParamName : theComboParamNames) { 2463 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2464 2465 if (nextValues == null || nextValues.isEmpty()) { 2466 paramValuesAreValidForCombo = false; 2467 break; 2468 } 2469 2470 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2471 paramOrValues.add(nextAndValue); 2472 2473 for (IQueryParameterType nextOrValue : nextAndValue) { 2474 if (nextOrValue instanceof DateParam dateParam) { 2475 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2476 String message = "Search with params " + theComboParamNames 2477 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2478 + nextParamName + "'"; 2479 firePerformanceInfo(theRequest, message); 2480 paramValuesAreValidForCombo = false; 2481 break; 2482 } 2483 } 2484 if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) { 2485 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2486 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2487 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
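// Illustrative examples (hypothetical parameter values):
//   birthdate=2025-01-01 or birthdate=eq2025-01-01 -> still eligible for the combo index
//   birthdate=gt2025-01-01                         -> rejected here (prefix other than 'eq')
//   name:exact=SIMPSON                             -> rejected further below (modifier present)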
2488 if (prefix != null && prefix != EQUAL) { 2489 String message = "Search with params " + theComboParamNames 2490 + " is not a candidate for combo searching - Parameter '" + nextParamName 2491 + "' has prefix: '" 2492 + paramWithPrefix.getPrefix().getValue() + "'"; 2493 firePerformanceInfo(theRequest, message); 2494 paramValuesAreValidForCombo = false; 2495 break; 2496 } 2497 } 2498 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2499 String message = "Search with params " + theComboParamNames 2500 + " is not a candidate for combo searching - Parameter '" + nextParamName 2501 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2502 firePerformanceInfo(theRequest, message); 2503 paramValuesAreValidForCombo = false; 2504 break; 2505 } 2506 } 2507 2508 // Reference params are only eligible for using a composite index if they 2509 // are qualified 2510 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2511 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2512 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2513 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2514 if (isBlank(param.getResourceType())) { 2515 ourLog.debug( 2516 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2517 paramValuesAreValidForCombo = false; 2518 break; 2519 } 2520 } 2521 2522 // Date params are not eligible for using composite unique index 2523 // as index could contain date with different precision (e.g. DAY, SECOND) 2524 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2525 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2526 ourLog.debug( 2527 "Search with params {} is not a candidate for combo searching - " 2528 + "Unique combo search parameter '{}' has DATE type", 2529 theComboParamNames, 2530 nextParamName); 2531 paramValuesAreValidForCombo = false; 2532 break; 2533 } 2534 } 2535 2536 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2537 ourLog.debug( 2538 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2539 paramValuesAreValidForCombo = false; 2540 } 2541 2542 return paramValuesAreValidForCombo; 2543 } 2544 2545 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2546 for (int i = 0; i < theListOfLists.size(); i++) { 2547 List<T> oldSubList = theListOfLists.get(i); 2548 if (!(oldSubList instanceof ArrayList)) { 2549 List<T> newSubList = new ArrayList<>(oldSubList); 2550 theListOfLists.set(i, newSubList); 2551 } 2552 } 2553 } 2554 2555 @Override 2556 public void setFetchSize(int theFetchSize) { 2557 myFetchSize = theFetchSize; 2558 } 2559 2560 public SearchParameterMap getParams() { 2561 return myParams; 2562 } 2563 2564 public CriteriaBuilder getBuilder() { 2565 return myCriteriaBuilder; 2566 } 2567 2568 public Class<? 
extends IBaseResource> getResourceType() { 2569 return myResourceType; 2570 } 2571 2572 public String getResourceName() { 2573 return myResourceName; 2574 } 2575 2576 /** 2577 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2578 */ 2579 private class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2580 2581 private final RequestDetails myRequest; 2582 private final Set<JpaPid> myCurrentPids; 2583 private Iterator<JpaPid> myCurrentIterator; 2584 private JpaPid myNext; 2585 2586 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2587 myCurrentPids = new HashSet<>(thePidSet); 2588 myCurrentIterator = null; 2589 myRequest = theRequest; 2590 } 2591 2592 private void fetchNext() { 2593 while (myNext == null) { 2594 2595 if (myCurrentIterator == null) { 2596 Set<Include> includes = new HashSet<>(); 2597 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2598 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2599 for (IQueryParameterType type : typeList) { 2600 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2601 for (String resourceType : queryString.split(",")) { 2602 String rt = resourceType.trim(); 2603 if (isNotBlank(rt)) { 2604 includes.add(new Include(rt + ":*", true)); 2605 } 2606 } 2607 } 2608 } 2609 } 2610 if (includes.isEmpty()) { 2611 includes.add(new Include("*", true)); 2612 } 2613 Set<JpaPid> newPids = loadIncludes( 2614 myContext, 2615 myEntityManager, 2616 myCurrentPids, 2617 includes, 2618 false, 2619 getParams().getLastUpdated(), 2620 mySearchUuid, 2621 myRequest, 2622 null); 2623 myCurrentIterator = newPids.iterator(); 2624 } 2625 2626 if (myCurrentIterator.hasNext()) { 2627 myNext = myCurrentIterator.next(); 2628 } else { 2629 myNext = NO_MORE; 2630 } 2631 } 2632 } 2633 2634 @Override 2635 public boolean hasNext() { 2636 fetchNext(); 2637 return !NO_MORE.equals(myNext); 2638 } 2639 2640 @Override 2641 public JpaPid next() { 2642 fetchNext(); 2643 JpaPid retVal = myNext; 2644 myNext = null; 2645 return retVal; 2646 } 2647 } 2648 /** 2649 * Basic Query iterator, used to fetch the results of a query. 2650 */ 2651 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2652 2653 private final SearchRuntimeDetails mySearchRuntimeDetails; 2654 2655 private final RequestDetails myRequest; 2656 private final boolean myHaveRawSqlHooks; 2657 private final boolean myHavePerfTraceFoundIdHook; 2658 private final SortSpec mySort; 2659 private final Integer myOffset; 2660 private final IInterceptorBroadcaster myCompositeBroadcaster; 2661 private boolean myFirst = true; 2662 private IncludesIterator myIncludesIterator; 2663 /** 2664 * The next JpaPid value of the next result in this query. 2665 * Will not be null if fetched using getNext() 2666 */ 2667 private JpaPid myNext; 2668 /** 2669 * The current query result iterator running sql and supplying PIDs 2670 * @see #myQueryList 2671 */ 2672 private ISearchQueryExecutor myResultsIterator; 2673 2674 private boolean myFetchIncludesForEverythingOperation; 2675 2676 /** 2677 * The count of resources skipped because they were seen in earlier results 2678 */ 2679 private int mySkipCount = 0; 2680 /** 2681 * The count of resources that are new in this search 2682 * (ie, not cached in previous searches) 2683 */ 2684 private int myNonSkipCount = 0; 2685 /** 2686 * The list of queries to use to find all results. 
2687 * Normal JPA queries will normally have a single entry. 2688 * Queries that involve Hibernate Search/Elasticsearch may have 2689 * multiple queries because of chunking. 2690 * The $everything operation also jams some extra results in. 2691 */ 2692 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2693 2694 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2695 mySearchRuntimeDetails = theSearchRuntimeDetails; 2696 mySort = myParams.getSort(); 2697 myOffset = myParams.getOffset(); 2698 myRequest = theRequest; 2699 myCompositeBroadcaster = 2700 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2701 2702 // everything requires fetching recursively all related resources 2703 if (myParams.getEverythingMode() != null) { 2704 myFetchIncludesForEverythingOperation = true; 2705 } 2706 2707 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2708 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2709 } 2710 2711 private void fetchNext() { 2712 try { 2713 if (myHaveRawSqlHooks) { 2714 CurrentThreadCaptureQueriesListener.startCapturing(); 2715 } 2716 2717 // If we don't have a query yet, create one 2718 if (myResultsIterator == null) { 2719 if (!mySearchProperties.hasMaxResultsRequested()) { 2720 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2721 } 2722 2723 /* 2724 * assigns the results iterator 2725 * and populates the myQueryList. 2726 */ 2727 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2728 } 2729 2730 if (myNext == null) { 2731 // no next means we need a new query (if one is available) 2732 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2733 /* 2734 * Because we combine our DB searches with Lucene 2735 * sometimes we can have multiple results iterators 2736 * (with only some having data in them to extract). 2737 * 2738 * We'll iterate our results iterators until we 2739 * either run out of results iterators, or we 2740 * have one that actually has data in it. 2741 */ 2742 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2743 retrieveNextIteratorQuery(); 2744 } 2745 2746 if (!myResultsIterator.hasNext()) { 2747 // we couldn't find a results iterator; 2748 // we're done here 2749 break; 2750 } 2751 2752 JpaPid nextPid = myResultsIterator.next(); 2753 if (myHavePerfTraceFoundIdHook) { 2754 callPerformanceTracingHook(nextPid); 2755 } 2756 2757 if (nextPid != null) { 2758 if (!myPidSet.contains(nextPid)) { 2759 if (!mySearchProperties.isDeduplicateInDatabase()) { 2760 /* 2761 * We only add to the map if we aren't fetching "everything"; 2762 * otherwise, we let the de-duplication happen in the database 2763 * (see createChunkedQueryNormalSearch above), because it 2764 * saves memory that way. 
2765 */ 2766 myPidSet.add(nextPid); 2767 } 2768 if (doNotSkipNextPidForEverything()) { 2769 myNext = nextPid; 2770 myNonSkipCount++; 2771 break; 2772 } 2773 } else { 2774 mySkipCount++; 2775 } 2776 } 2777 2778 if (!myResultsIterator.hasNext()) { 2779 if (mySearchProperties.hasMaxResultsRequested() 2780 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2781 if (mySkipCount > 0 && myNonSkipCount == 0) { 2782 sendProcessingMsgAndFirePerformanceHook(); 2783 // need the next iterator; increase the maxsize 2784 // (we should always do this) 2785 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2786 mySearchProperties.setMaxResultsRequested(maxResults); 2787 2788 if (!mySearchProperties.isDeduplicateInDatabase()) { 2789 // if we're not using the database to deduplicate 2790 // we should recheck our memory usage 2791 // the prefetch size check is future proofing 2792 int prefetchSize = myStorageSettings 2793 .getSearchPreFetchThresholds() 2794 .size(); 2795 if (prefetchSize > 0) { 2796 if (myStorageSettings 2797 .getSearchPreFetchThresholds() 2798 .get(prefetchSize - 1) 2799 < mySearchProperties.getMaxResultsRequested()) { 2800 mySearchProperties.setDeduplicateInDatabase(true); 2801 } 2802 } 2803 } 2804 2805 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2806 } 2807 } 2808 } 2809 } 2810 } 2811 2812 if (myNext == null) { 2813 // if we got here, it means the current JpaPid has already been processed, 2814 // and we will decide (here) if we need to fetch related resources recursively 2815 if (myFetchIncludesForEverythingOperation) { 2816 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2817 myFetchIncludesForEverythingOperation = false; 2818 } 2819 if (myIncludesIterator != null) { 2820 while (myIncludesIterator.hasNext()) { 2821 JpaPid next = myIncludesIterator.next(); 2822 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2823 myNext = next; 2824 break; 2825 } 2826 } 2827 if (myNext == null) { 2828 myNext = NO_MORE; 2829 } 2830 } else { 2831 myNext = NO_MORE; 2832 } 2833 } 2834 2835 if (!mySearchProperties.hasMaxResultsRequested()) { 2836 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2837 } else { 2838 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2839 } 2840 2841 } finally { 2842 // search finished - fire hooks 2843 if (myHaveRawSqlHooks) { 2844 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2845 } 2846 } 2847 2848 if (myFirst) { 2849 HookParams params = new HookParams() 2850 .add(RequestDetails.class, myRequest) 2851 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2852 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2853 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2854 myFirst = false; 2855 } 2856 2857 if (NO_MORE.equals(myNext)) { 2858 HookParams params = new HookParams() 2859 .add(RequestDetails.class, myRequest) 2860 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2861 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2862 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2863 } 2864 } 2865 2866 private Integer calculateMaxResultsToFetch() { 2867 if (myParams.getLoadSynchronousUpTo() != null) { 2868 return myParams.getLoadSynchronousUpTo(); 2869 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2870 return myParams.getEverythingMode() != null 2871 ? 
myParams.getOffset() + myParams.getCount() 2872 : myParams.getCount(); 2873 } else { 2874 return myStorageSettings.getFetchSizeDefaultMaximum(); 2875 } 2876 } 2877 2878 private boolean doNotSkipNextPidForEverything() { 2879 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2880 } 2881 2882 private void callPerformanceTracingHook(JpaPid theNextPid) { 2883 HookParams params = new HookParams() 2884 .add(Integer.class, System.identityHashCode(this)) 2885 .add(Object.class, theNextPid); 2886 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2887 } 2888 2889 private void sendProcessingMsgAndFirePerformanceHook() { 2890 String msg = "Pass completed with no matching results seeking rows " 2891 + myPidSet.size() + "-" + mySkipCount 2892 + ". This indicates an inefficient query! Retrying with new max count of " 2893 + mySearchProperties.getMaxResultsRequested(); 2894 firePerformanceWarning(myRequest, msg); 2895 } 2896 2897 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2898 Integer offset = theOffset; 2899 if (myQueryList.isEmpty()) { 2900 // Capture times for Lucene/Elasticsearch queries as well 2901 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2902 2903 // setting offset to 0 to fetch all resource ids to guarantee 2904 // correct output result for everything operation during paging 2905 if (myParams.getEverythingMode() != null) { 2906 offset = 0; 2907 } 2908 2909 SearchQueryProperties properties = mySearchProperties.clone(); 2910 properties 2911 .setOffset(offset) 2912 .setMaxResultsRequested(theMaxResultsToFetch) 2913 .setDoCountOnlyFlag(false) 2914 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2915 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2916 } 2917 2918 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2919 2920 retrieveNextIteratorQuery(); 2921 2922 mySkipCount = 0; 2923 myNonSkipCount = 0; 2924 } 2925 2926 private void retrieveNextIteratorQuery() { 2927 close(); 2928 if (isNotEmpty(myQueryList)) { 2929 myResultsIterator = myQueryList.remove(0); 2930 myHasNextIteratorQuery = true; 2931 } else { 2932 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2933 myHasNextIteratorQuery = false; 2934 } 2935 } 2936 2937 @Override 2938 public boolean hasNext() { 2939 if (myNext == null) { 2940 fetchNext(); 2941 } 2942 return !NO_MORE.equals(myNext); 2943 } 2944 2945 @Override 2946 public JpaPid next() { 2947 fetchNext(); 2948 JpaPid retVal = myNext; 2949 myNext = null; 2950 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2951 return retVal; 2952 } 2953 2954 @Override 2955 public int getSkippedCount() { 2956 return mySkipCount; 2957 } 2958 2959 @Override 2960 public int getNonSkippedCount() { 2961 return myNonSkipCount; 2962 } 2963 2964 @Override 2965 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2966 Collection<JpaPid> batch = new ArrayList<>(); 2967 while (this.hasNext() && batch.size() < theBatchSize) { 2968 batch.add(this.next()); 2969 } 2970 return batch; 2971 } 2972 2973 @Override 2974 public void close() { 2975 if (myResultsIterator != null) { 2976 myResultsIterator.close(); 2977 } 2978 myResultsIterator = null; 2979 } 2980 } 2981 2982 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2983 // Only log at debug level since these messages aren't considered important enough 2984 // that we should be 
cluttering the system log, but they are important to the
2985 // specific query being executed, so we'll log them at INFO level there
2986 ourLog.debug(theMessage);
2987 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
2988 }
2989
2990 private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
2991 ourLog.warn(theMessage);
2992 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
2993 }
2994
2995 private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
2996 IInterceptorBroadcaster compositeBroadcaster =
2997 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
2998 if (compositeBroadcaster.hasHooks(thePointcut)) {
2999 StorageProcessingMessage message = new StorageProcessingMessage();
3000 message.setMessage(theMessage);
3001 HookParams params = new HookParams()
3002 .add(RequestDetails.class, theRequest)
3003 .addIfMatchesType(ServletRequestDetails.class, theRequest)
3004 .add(StorageProcessingMessage.class, message);
3005 compositeBroadcaster.callHooks(thePointcut, params);
3006 }
3007 }
3008
3009 public static int getMaximumPageSize() {
3010 if (myMaxPageSizeForTests != null) {
3011 return myMaxPageSizeForTests;
3012 }
3013 return MAXIMUM_PAGE_SIZE;
3014 }
3015
3016 public static void setMaxPageSizeForTest(Integer theTestSize) {
3017 myMaxPageSizeForTests = theTestSize;
3018 }
3019
3020 private static ScrollableResults<?> toScrollableResults(Query theQuery) {
3021 org.hibernate.query.Query<?> hibernateQuery = (org.hibernate.query.Query<?>) theQuery;
3022 return hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
3023 }
3024}
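/*
 * Illustrative usage sketch for partitionBySizeAndPartitionId() (PID and partition values are
 * hypothetical; documentation only, not executed code):
 *
 *   List<JpaPid> pids = List.of(
 *       JpaPid.fromId(1L, 1),  // partition 1
 *       JpaPid.fromId(2L, 1),  // partition 1
 *       JpaPid.fromId(3L, 2)); // partition 2
 *
 *   // partitionBySizeAndPartitionId(pids, 2) is expected to yield two chunks,
 *   //   [JpaPid 1, JpaPid 2]   (both partition 1)
 *   //   [JpaPid 3]             (partition 2)
 *   // so that each chunked include query binds PIDs from a single partition only.
 */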