
/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2025 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ComboSearchParamType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dao.JpaPidFk;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CartesianProductUtil;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.stripStart;

/**
 * The SearchBuilder is responsible for actually forming the SQL query that handles
 * searches for resources
 */
public class SearchBuilder implements ISearchBuilder<JpaPid> {

	/**
	 * See loadResourcesByPid
	 * for an explanation of why we use the constant 800
	 */
	// NB: keep public
	@Deprecated
	public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;

	public static final String RESOURCE_ID_ALIAS = "resource_id";
	public static final String PARTITION_ID_ALIAS = "partition_id";
	public static final String RESOURCE_VERSION_ALIAS = "resource_version";
	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
	private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
	private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
	private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
	private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
	private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
	private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
	private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
	private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
	public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
	public static boolean myUseMaxPageSize50ForTest = false;
	public static Integer myMaxPageSizeForTests = null;
	protected final IInterceptorBroadcaster myInterceptorBroadcaster;
	protected final IResourceTagDao myResourceTagDao;
	private String myResourceName;
	private final Class<? extends IBaseResource> myResourceType;
	private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
	private final SqlObjectFactory mySqlBuilderFactory;
	private final HibernatePropertiesProvider myDialectProvider;
	private final ISearchParamRegistry mySearchParamRegistry;
	private final PartitionSettings myPartitionSettings;
	private final DaoRegistry myDaoRegistry;
	private final FhirContext myContext;
	private final IIdHelperService<JpaPid> myIdHelperService;
	private final JpaStorageSettings myStorageSettings;
	private final SearchQueryProperties mySearchProperties;
	private final IResourceHistoryTableDao myResourceHistoryTableDao;
	private final IJpaStorageResourceParser myJpaStorageResourceParser;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

	private CriteriaBuilder myCriteriaBuilder;
	private SearchParameterMap myParams;
	private String mySearchUuid;
	private int myFetchSize;

	private boolean myRequiresTotal;

	/**
	 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
	 */
	private Set<JpaPid> myPidSet;

	private boolean myHasNextIteratorQuery = false;
	private RequestPartitionId myRequestPartitionId;

	private IFulltextSearchSvc myFulltextSearchSvc;

	@Autowired(required = false)
	public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) {
		myFulltextSearchSvc = theFulltextSearchSvc;
	}

	@Autowired(required = false)
	private IElasticsearchSvc myIElasticsearchSvc;

	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;

	@Autowired
	private IRequestPartitionHelperSvc myPartitionHelperSvc;

	/**
	 * Constructor
	 */
	@SuppressWarnings({"rawtypes", "unchecked"})
	public SearchBuilder(
			String theResourceName,
			JpaStorageSettings theStorageSettings,
			HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider,
			ISearchParamRegistry theSearchParamRegistry,
			PartitionSettings thePartitionSettings,
			IInterceptorBroadcaster theInterceptorBroadcaster,
			IResourceTagDao theResourceTagDao,
			DaoRegistry theDaoRegistry,
			FhirContext theContext,
			IIdHelperService theIdHelperService,
			IResourceHistoryTableDao theResourceHistoryTagDao,
			IJpaStorageResourceParser theIJpaStorageResourceParser,
			Class<? extends IBaseResource> theResourceType) {
		myResourceName = theResourceName;
		myResourceType = theResourceType;
		myStorageSettings = theStorageSettings;

		myEntityManagerFactory = theEntityManagerFactory;
		mySqlBuilderFactory = theSqlBuilderFactory;
		myDialectProvider = theDialectProvider;
		mySearchParamRegistry = theSearchParamRegistry;
		myPartitionSettings = thePartitionSettings;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
		myResourceTagDao = theResourceTagDao;
		myDaoRegistry = theDaoRegistry;
		myContext = theContext;
		myIdHelperService = theIdHelperService;
		myResourceHistoryTableDao = theResourceHistoryTagDao;
		myJpaStorageResourceParser = theIJpaStorageResourceParser;

		mySearchProperties = new SearchQueryProperties();
	}

	@VisibleForTesting
	void setResourceName(String theName) {
		myResourceName = theName;
	}

	@Override
	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
		mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
	}

	@Override
	public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
		mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
	}

	@Override
	public void setRequireTotal(boolean theRequireTotal) {
		myRequiresTotal = theRequireTotal;
	}

	@Override
	public boolean requiresTotal() {
		return myRequiresTotal;
	}

	private void searchForIdsWithAndOr(
			SearchQueryBuilder theSearchSqlBuilder,
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest) {
		myParams = theParams;
		mySearchProperties.setSortSpec(myParams.getSort());

		// Remove any empty parameters
		theParams.clean();

		// For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance
		if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
			Dstu3DistanceHelper.setNearDistance(myResourceType, theParams);
		}

		// Attempt to lookup via composite unique key.
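		// When an active combo (composite unique) search parameter covers the supplied parameters, the
		// combo processing below can replace several AND clauses with a single indexed predicate.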
		if (isCompositeUniqueSpCandidate()) {
			attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
		}

		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream()
				.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
				.filter(t -> !t.equals(Constants.PARAM_TAG))
				.collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {
			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by
				// Elasticsearch
				continue;
			}
			List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
			Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
					.setParamName(nextParamName)
					.setAndOrParams(andOrParams)
					.setRequest(theRequest)
					.setRequestPartitionId(myRequestPartitionId));
			if (predicate != null) {
				theSearchSqlBuilder.addPredicate(predicate);
			}
		}
	}

	/**
	 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the
	 * parameters all have no modifiers.
	 */
	private boolean isCompositeUniqueSpCandidate() {
		return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null;
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public Long createCountQuery(
			SearchParameterMap theParams,
			String theSearchUuid,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {

		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchUuid, theRequestPartitionId);

		if (checkUseHibernateSearch()) {
			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
		}

		SearchQueryProperties properties = mySearchProperties.clone();
		properties.setDoCountOnlyFlag(true);
		properties.setSortSpec(null); // counts don't require sorts
		properties.setMaxResultsRequested(null);
		properties.setOffset(null);
		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
		if (queries.isEmpty()) {
			return 0L;
		} else {
			JpaPid jpaPid = queries.get(0).next();
			return jpaPid.getId();
		}
	}

	/**
	 * @param thePidSet May be null
	 */
	@Override
	public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) {
		myPidSet = new HashSet<>(thePidSet);
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public IResultIterator<JpaPid> createQuery(
			SearchParameterMap theParams,
			SearchRuntimeDetails theSearchRuntimeDetails,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {
		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);

		if (myPidSet == null) {
			myPidSet = new HashSet<>();
		}

		return new QueryIterator(theSearchRuntimeDetails, theRequest);
	}
	private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
		myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
		// we mutate the params. Make a private copy.
		myParams = theParams.clone();
		mySearchProperties.setSortSpec(myParams.getSort());
		mySearchUuid = theSearchUuid;
		myRequestPartitionId = theRequestPartitionId;
	}

	/**
	 * The query created can be either a count query or the
	 * actual query.
	 * This is why it takes a SearchQueryProperties object
	 * (and doesn't use the local version of it).
	 * The properties may differ slightly for whichever
	 * query this is.
	 */
	private List<ISearchQueryExecutor> createQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			SearchRuntimeDetails theSearchRuntimeDetails) {
		ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

		if (checkUseHibernateSearch()) {
			// we're going to run at least part of the search against the Fulltext service.

			// Ugh - we have two different return types for now
			ISearchQueryExecutor fulltextExecutor = null;
			List<JpaPid> fulltextMatchIds = null;
			int resultCount = 0;
			if (myParams.isLastN()) {
				fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested());
				resultCount = fulltextMatchIds.size();
			} else if (myParams.getEverythingMode() != null) {
				fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
				resultCount = fulltextMatchIds.size();
			} else {
				// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
				// enabled SP indexing).
				// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
				// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
				// extra work in Elastic.
				// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

				// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
				// a page.
				fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
			}

			if (fulltextExecutor == null) {
				fulltextExecutor =
						SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
			}

			if (theSearchRuntimeDetails != null) {
				theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
				IInterceptorBroadcaster compositeBroadcaster =
						CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
				if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
					HookParams params = new HookParams()
							.add(RequestDetails.class, theRequest)
							.addIfMatchesType(ServletRequestDetails.class, theRequest)
							.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
					compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
				}
			}

			// can we skip the database entirely and return the pid list from here?
			boolean canSkipDatabase =
					// if we processed an AND clause, and it returned nothing, then nothing can match.
					!fulltextExecutor.hasNext()
							||
							// Our hibernate search query doesn't respect partitions yet
							(!myPartitionSettings.isPartitioningEnabled()
									&&
									// were there AND terms left? Then we still need the db.
									theParams.isEmpty()
									&&
									// not every param is a param. :-(
									theParams.getNearDistanceParam() == null
									&&
									// todo MB don't we support _lastUpdated and _offset now?
									theParams.getLastUpdated() == null
									&& theParams.getEverythingMode() == null
									&& theParams.getOffset() == null);

			if (canSkipDatabase) {
				ourLog.trace("Query finished after HSearch. Skip db query phase");
				if (theSearchProperties.hasMaxResultsRequested()) {
					fulltextExecutor = SearchQueryExecutors.limited(
							fulltextExecutor, theSearchProperties.getMaxResultsRequested());
				}
				queries.add(fulltextExecutor);
			} else {
				ourLog.trace("Query needs db after HSearch. Chunking.");
				// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
				// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
				QueryChunker.chunk(
						fulltextExecutor,
						SearchBuilder.getMaximumPageSize(),
						// for each list of (SearchBuilder.getMaximumPageSize())
						// we create a chunked query and add it to 'queries'
						t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
			}
		} else {
			// do everything in the database.
			createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
		}

		return queries;
	}

	/**
	 * Check to see if query should use Hibernate Search, and error if the query can't continue.
	 *
	 * @return true if the query should first be processed by Hibernate Search
	 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text
	 */
	private boolean checkUseHibernateSearch() {
		boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled();

		if (!fulltextEnabled) {
			failIfUsed(Constants.PARAM_TEXT);
			failIfUsed(Constants.PARAM_CONTENT);
		} else {
			for (SortSpec sortSpec : myParams.getAllChainsInOrder()) {
				final String paramName = sortSpec.getParamName();
				if (paramName.contains(".")) {
					failIfUsedWithChainedSort(Constants.PARAM_TEXT);
					failIfUsedWithChainedSort(Constants.PARAM_CONTENT);
				}
			}
		}

		// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we
		// can.
		return fulltextEnabled
				&& myParams != null
				&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
				&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
				&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
	}

	private void failIfUsed(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(1192)
					+ "Fulltext search is not enabled on this service, can not process parameter: " + theParamName);
		}
	}

	private void failIfUsedWithChainedSort(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(2524)
					+ "Fulltext search combined with chained sorts are not supported, can not process parameter: "
					+ theParamName);
		}
	}

	private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) {
		// Can we use our hibernate search generated index on resource to support lastN?:
		if (myStorageSettings.isHibernateSearchIndexSearchParams()) {
			if (myFulltextSearchSvc == null) {
				throw new InvalidRequestException(Msg.code(2027)
						+ "LastN operation is not enabled on this service, can not process this request");
			}
			return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
					.map(t -> (JpaPid) t)
					.collect(Collectors.toList());
		} else {
			throw new InvalidRequestException(
					Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
		}
	}

	private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) {
		JpaPid pid = null;
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam =
					myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			if (idParam instanceof TokenParam idParm) {
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}

			pid = myIdHelperService
					.resolveResourceIdentity(
							myRequestPartitionId,
							myResourceName,
							idParamValue,
							ResolveIdentityMode.includeDeleted().cacheOk())
					.getPersistentId();
		}
		return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
	}
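	/**
	 * Called once per chunk produced by QueryChunker. For chunks smaller than the maximum page
	 * size, the PID list is normalized (padded to one of a small set of fixed lengths) so the
	 * generated IN (...) clauses are friendlier to statement caching; see
	 * {@link ca.uhn.fhir.jpa.util.InClauseNormalizer}.
	 */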
	private void doCreateChunkedQueries(
			SearchParameterMap theParams,
			List<JpaPid> thePids,
			SearchQueryProperties theSearchQueryProperties,
			RequestDetails theRequest,
			ArrayList<ISearchQueryExecutor> theQueries) {

		if (thePids.size() < getMaximumPageSize()) {
			thePids = normalizeIdListForInClause(thePids);
		}
		theSearchQueryProperties.setMaxResultsRequested(thePids.size());
		createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
	}

	/**
	 * Combs through the params for any _id parameters and extracts the PIDs for them
	 */
	private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
		// get all the IQueryParameterType objects
		// for _id -> these should all be StringParam values
		HashSet<IIdType> ids = new HashSet<>();
		List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
		for (List<IQueryParameterType> paramList : params) {
			for (IQueryParameterType param : paramList) {
				String id;
				if (param instanceof StringParam) {
					// we expect all _id values to be StringParams
					id = ((StringParam) param).getValue();
				} else if (param instanceof TokenParam) {
					id = ((TokenParam) param).getValue();
				} else {
					// we do not expect the _id parameter to be a non-string value
					throw new IllegalArgumentException(
							Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
				}

				IIdType idType = myContext.getVersion().newIdType();
				if (id.contains("/")) {
					idType.setValue(id);
				} else {
					idType.setValue(myResourceName + "/" + id);
				}
				ids.add(idType);
			}
		}

		// fetch our target Pids
		// this will throw if an id is not found
		Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
				myRequestPartitionId,
				new ArrayList<>(ids),
				ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

		// add the pids to targetPids
		for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
			theTargetPids.add(pid.getPersistentId());
		}
	}

	private void createChunkedQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		if (myParams.getEverythingMode() != null) {
			createChunkedQueryForEverythingSearch(
					theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
		} else {
			createChunkedQueryNormalSearch(
					theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
		}
	}

	private void createChunkedQueryNormalSearch(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				myResourceName,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchProperties.isDoCountOnlyFlag());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		if (theParams.keySet().size() > 1
				|| theParams.getSort() != null
				|| theParams.keySet().contains(Constants.PARAM_HAS)
				|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);
			}
		}

		/*
		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
		 * specific filters with ORs as their root from working around the natural resource type / deletion
		 * status / partition IDs built into queries.
		 */
		if (theParams.containsKey(Constants.PARAM_FILTER)) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Normal search
		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
		// partition ID predicate in that case.
		if (!sqlBuilder.haveAtLeastOnePredicate()) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		// Last updated
		addLastUpdatePredicate(sqlBuilder);

		/*
		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
		 * to something needed to guarantee correct results.
		 *
		 * Why do we need it? Suppose for example, a query like:
		 *    Observation?category=foo,bar,baz
		 * And suppose you have many resources that have all 3 of these category codes. In this case
		 * the SQL query will probably return the same PIDs multiple times, and if this happens enough
		 * we may exhaust the query results without getting enough distinct results back. When that
		 * happens we re-run the query with a larger limit. Excluding results we already know about
		 * tries to ensure that we get new unique results.
		 *
		 * The challenge with that though is that lots of DBs have an issue with too many
		 * parameters in one query. So we only do this optimization if there aren't too
		 * many results.
		 */
		if (myHasNextIteratorQuery) {
			if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) {
				sqlBuilder.excludeResourceIdsPredicate(myPidSet);
			}
		}

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * OR
		 * if the MaxResultsToFetch is null, we are requesting "everything",
		 * so we'll let the db do the deduplication (instead of in-memory)
		 */
		if (theSearchProperties.isDeduplicateInDatabase()) {
			queryStack3.addGrouping();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Sort
		 *
		 * If we have a sort, we wrap the criteria search (the search that actually
		 * finds the appropriate resources) in an outer search which is then sorted
		 */
		if (theSearchProperties.hasSort()) {
			assert !theSearchProperties.isDoCountOnlyFlag();

			createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void executeSearch(
			SearchQueryProperties theProperties,
			List<ISearchQueryExecutor> theSearchQueryExecutors,
			SearchQueryBuilder sqlBuilder) {
		GeneratedSql generatedSql =
				sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
		if (!generatedSql.isMatchNothing()) {
			SearchQueryExecutor executor =
					mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
			theSearchQueryExecutors.add(executor);
		}
	}

	private void createChunkedQueryForEverythingSearch(
			RequestDetails theRequest,
			SearchParameterMap theParams,
			SearchQueryProperties theSearchQueryProperties,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {

		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				null,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchQueryProperties.isDoCountOnlyFlag());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

		Set<JpaPid> targetPids = new HashSet<>();
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {

			extractTargetPidsFromIdParams(targetPids);

			// add the target pids to our executors as the first
			// results iterator to go through
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
		} else {
			// For Everything queries, we make the query root by the ResourceLink table, since this query
			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
			// the one problem with this approach is that it doesn't catch Patients that have absolutely
			// nothing linked to them. So we do one additional query to make sure we catch those too.
			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
					myContext,
					myStorageSettings,
					myPartitionSettings,
					myRequestPartitionId,
					myResourceName,
					mySqlBuilderFactory,
					myDialectProvider,
					theSearchQueryProperties.isDoCountOnlyFlag());
			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
					theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
			String sql = allTargetsSql.getSql();
			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

			List<JpaPid> output =
					jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()));

			// we add a search executor to fetch unlinked patients first
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
		}

		List<String> typeSourceResources = new ArrayList<>();
		if (myParams.get(Constants.PARAM_TYPE) != null) {
			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
		}

		queryStack3.addPredicateEverythingOperation(
				myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * ORDER BY is required to make sure we return unique results for each page
		 */
		if (theSearchQueryProperties.hasOffset()) {
			queryStack3.addGrouping();
			queryStack3.addOrdering();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
		if (thePidList != null && !thePidList.isEmpty()) {
			theSqlBuilder.addResourceIdsPredicate(thePidList);
		}
	}

	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
		DateRangeParam lu = myParams.getLastUpdated();
		if (lu != null && !lu.isEmpty()) {
			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
			theSqlBuilder.addPredicate(lastUpdatedPredicates);
		}
	}

	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
		jdbcTemplate.setFetchSize(myFetchSize);
		if (theMaximumResults != null) {
			jdbcTemplate.setMaxRows(theMaximumResults);
		}
		return jdbcTemplate;
	}

	private Collection<String> extractTypeSourceResourcesFromParams() {

		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);

		// first off, let's flatten the list of list
		List<IQueryParameterType> iQueryParameterTypesList =
				listOfList.stream().flatMap(List::stream).collect(Collectors.toList());

		// then, extract all elements of each CSV into one big list
		List<String> resourceTypes = iQueryParameterTypesList.stream()
				.map(param -> ((StringParam) param).getValue())
				.map(csvString -> List.of(csvString.split(",")))
				.flatMap(List::stream)
				.collect(Collectors.toList());

		Set<String> knownResourceTypes = myContext.getResourceTypes();

		// remove leading/trailing whitespaces if any and remove duplicates
		Set<String> retVal = new HashSet<>();

		for (String type : resourceTypes) {
			String trimmed = type.trim();
			if (!knownResourceTypes.contains(trimmed)) {
				throw new ResourceNotFoundException(
						Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter.");
			}
			retVal.add(trimmed);
		}

		return retVal;
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myStorageSettings.isIndexOnContainedResources()
				&& theParams.values().stream()
						.flatMap(Collection::stream)
						.flatMap(Collection::stream)
						.anyMatch(ReferenceParam.class::isInstance);
	}

	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
		if (theSort == null || isBlank(theSort.getParamName())) {
			return;
		}

		boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC);

		if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourceId(ascending);

		} else if (Constants.PARAM_PID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourcePID(ascending);

		} else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {

			theQueryStack.addSortOnLastUpdated(ascending);

		} else {
			RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

			/*
			 * If we have a sort like _sort=subject.name and we have an
			 * uplifted refchain for that combination we can do it more efficiently
			 * by using the index associated with the uplifted refchain. In this case,
			 * we need to find the actual target search parameter (corresponding
			 * to "name" in this example) so that we know what datatype it is.
			 */
			String paramName = theSort.getParamName();
			if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
				String[] chains = StringUtils.split(paramName, '.');
				if (chains.length == 2) {

					// Given: Encounter?_sort=Patient:subject.name
					String referenceParam = chains[0]; // subject
					String referenceParamTargetType = null; // Patient
					String targetParam = chains[1]; // name

					int colonIdx = referenceParam.indexOf(':');
					if (colonIdx > -1) {
						referenceParamTargetType = referenceParam.substring(0, colonIdx);
						referenceParam = referenceParam.substring(colonIdx + 1);
					}
					RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
							myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
					if (outerParam == null) {
						throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
					} else if (outerParam.hasUpliftRefchain(targetParam)) {
						for (String nextTargetType : outerParam.getTargets()) {
							if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
								continue;
							}
							RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
									nextTargetType,
									targetParam,
									ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
							if (innerParam != null) {
								param = innerParam;
								break;
							}
						}
					}
				}
			}

			int colonIdx = paramName.indexOf(':');
			String referenceTargetType = null;
			if (colonIdx > -1) {
				referenceTargetType = paramName.substring(0, colonIdx);
				paramName = paramName.substring(colonIdx + 1);
			}

			int dotIdx = paramName.indexOf('.');
			String chainName = null;
			if (param == null && dotIdx > -1) {
				chainName = paramName.substring(dotIdx + 1);
				paramName = paramName.substring(0, dotIdx);
				if (chainName.contains(".")) {
					String msg = myContext
							.getLocalizer()
							.getMessageSanitized(
									BaseStorageDao.class,
									"invalidSortParameterTooManyChains",
									paramName + "." + chainName);
					throw new InvalidRequestException(Msg.code(2286) + msg);
				}
			}

			if (param == null) {
				param = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
			}

			if (param == null) {
				throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
			}

			// param will never be null here (the above line throws if it does)
			// this is just to prevent the warning
			assert param != null;
			if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
				throw new InvalidRequestException(
						Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
			}

			switch (param.getParamType()) {
				case STRING:
					theQueryStack.addSortOnString(myResourceName, paramName, ascending);
					break;
				case DATE:
					theQueryStack.addSortOnDate(myResourceName, paramName, ascending);
					break;
				case REFERENCE:
					theQueryStack.addSortOnResourceLink(
							myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
					break;
				case TOKEN:
					theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
					break;
				case NUMBER:
					theQueryStack.addSortOnNumber(myResourceName, paramName, ascending);
					break;
				case URI:
					theQueryStack.addSortOnUri(myResourceName, paramName, ascending);
					break;
				case QUANTITY:
					theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending);
					break;
				case COMPOSITE:
					List<RuntimeSearchParam> compositeList =
							JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param);
					if (compositeList == null) {
						throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName
								+ " is not defined by the resource " + myResourceName);
					}
					if (compositeList.size() != 2) {
						throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName
								+ " must have 2 composite types declared in parameter annotation, found "
								+ compositeList.size());
					}
					RuntimeSearchParam left = compositeList.get(0);
					RuntimeSearchParam right = compositeList.get(1);

					createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending);
					createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending);

					break;
				case SPECIAL:
					if (LOCATION_POSITION.equals(param.getPath())) {
						theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams);
						break;
					}
					throw new InvalidRequestException(
							Msg.code(2306) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);

				case HAS:
				default:
					throw new InvalidRequestException(
							Msg.code(1197) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);
			}
		}

		// Recurse
		createSort(theQueryStack, theSort.getChain(), theParams);
	}

	private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
		Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
				theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
		String msg = myContext
				.getLocalizer()
				.getMessageSanitized(
						BaseStorageDao.class,
						"invalidSortParameter",
						theParamName,
						theResourceName,
						validSearchParameterNames);
		throw new InvalidRequestException(Msg.code(1194) + msg);
	}

	private void createCompositeSort(
			QueryStack theQueryStack,
			RestSearchParameterTypeEnum theParamType,
			String theParamName,
			boolean theAscending) {

		switch (theParamType) {
			case STRING:
				theQueryStack.addSortOnString(myResourceName, theParamName, theAscending);
				break;
			case DATE:
				theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending);
				break;
			case TOKEN:
				theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending);
				break;
			case QUANTITY:
				theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending);
				break;
			case NUMBER:
			case REFERENCE:
			case COMPOSITE:
			case URI:
			case HAS:
			case SPECIAL:
			default:
				throw new InvalidRequestException(
						Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType
								+ " on _sort=" + theParamName);
		}
	}

	private void doLoadPids(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			Map<Long, Integer> thePosition) {

		Map<JpaPid, Long> resourcePidToVersion = null;
		for (JpaPid next : thePids) {
			if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
				if (resourcePidToVersion == null) {
					resourcePidToVersion = new HashMap<>();
				}
				resourcePidToVersion.put(next, next.getVersion());
			}
		}

		List<JpaPid> versionlessPids = new ArrayList<>(thePids);
		if (versionlessPids.size() < getMaximumPageSize()) {
			versionlessPids = normalizeIdListForInClause(versionlessPids);
		}

		// Load the resource bodies
		List<ResourceHistoryTable> resourceSearchViewList =
				myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(
						JpaPidFk.fromPids(versionlessPids));

		/*
		 * If we have specific versions to load, replace the history entries with the
		 * correct ones
		 *
		 * TODO: this could definitely be made more efficient, probably by not loading the wrong
		 * version entity first, and by batching the fetches. But this is a fairly infrequently
		 * used feature, and loading history entities by PK is a very efficient query so it's
		 * not the end of the world
		 */
		if (resourcePidToVersion != null) {
			for (int i = 0; i < resourceSearchViewList.size(); i++) {
				ResourceHistoryTable next = resourceSearchViewList.get(i);
				JpaPid resourceId = next.getPersistentId();
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
				if (version != null && !version.equals(next.getVersion())) {
					ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion(
							next.getResourceId().toFk(), version);
					resourceSearchViewList.set(i, replacement);
				}
			}
		}

		// -- preload all tags with tag definition if any
		Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);

		for (ResourceHistoryTable next : resourceSearchViewList) {
			if (next.getDeleted() != null) {
				continue;
			}

			Class<? extends IBaseResource> resourceType =
					myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();
			JpaPid resourceId = next.getPersistentId();

			if (resourcePidToVersion != null) {
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
			}

			IBaseResource resource;
			resource = myJpaStorageResourceParser.toResource(
					resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation);
			if (resource == null) {
				ourLog.warn(
						"Unable to find resource {}/{}/_history/{} in database",
						next.getResourceType(),
						next.getIdDt().getIdPart(),
						next.getVersion());
				continue;
			}

			Integer index = thePosition.get(resourceId.getId());
			if (index == null) {
				ourLog.warn("Got back unexpected resource PID {}", resourceId);
				continue;
			}

			if (theIncludedPids.contains(resourceId)) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH);
			}

			// ensure there's enough space; "<=" because of 0-indexing
			while (theResourceListToPopulate.size() <= index) {
				theResourceListToPopulate.add(null);
			}
			theResourceListToPopulate.set(index, resource);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
		return switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED -> getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED -> getPidToTagMapUnversioned(theHistoryTables);
			default -> Map.of();
		};
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		int index = 0;
		for (JpaPid next : thePids) {
			position.put(next.getId(), index++);
		}

		// Can we fast track this loading by checking elastic search?
		boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids);
		if (isUsingElasticSearch) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in index, so we will inform this and resort to JPA search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some jdbc drivers can't handle long param lists.
		QueryChunker.chunk(
				thePids,
				t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position));
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isHibernateSearchIndexSearchParams()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we load resources via the fulltext svc (hibernate-search), or fall back to the older
		// Elasticsearch-only implementation used for lastN?
		if (myStorageSettings.isHibernateSearchIndexSearchParams()
				&& myStorageSettings.isStoreResourceInHSearchIndex()) {
			List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList());

			return myFulltextSearchSvc.getResources(pidList);
		} else if (!Objects.isNull(myParams) && myParams.isLastN()) {
			// legacy LastN implementation
			return myIElasticsearchSvc.getObservationResources(thePids);
		} else {
			return Collections.emptyList();
		}
	}

	/**
	 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later
	 * so it can't be Collections.emptySet() or some such thing.
	 * The JpaPid returned will have resource type populated.
	 */
	@Override
	public Set<JpaPid> loadIncludes(
			FhirContext theContext,
			EntityManager theEntityManager,
			Collection<JpaPid> theMatches,
			Collection<Include> theIncludes,
			boolean theReverseMode,
			DateRangeParam theLastUpdated,
			String theSearchIdOrDescription,
			RequestDetails theRequest,
			Integer theMaxCount) {
		SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>();
		parameters.setFhirContext(theContext);
		parameters.setEntityManager(theEntityManager);
		parameters.setMatches(theMatches);
		parameters.setIncludeFilters(theIncludes);
		parameters.setReverseMode(theReverseMode);
		parameters.setLastUpdated(theLastUpdated);
		parameters.setSearchIdOrDescription(theSearchIdOrDescription);
		parameters.setRequestDetails(theRequest);
		parameters.setMaxCount(theMaxCount);
		return loadIncludes(parameters);
	}

	@Override
	public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) {
		Collection<JpaPid> matches = theParameters.getMatches();
		Collection<Include> currentIncludes = theParameters.getIncludeFilters();
		boolean reverseMode = theParameters.isReverseMode();
		EntityManager entityManager = theParameters.getEntityManager();
		Integer maxCount = theParameters.getMaxCount();
		FhirContext fhirContext = theParameters.getFhirContext();
		RequestDetails request = theParameters.getRequestDetails();
		String searchIdOrDescription = theParameters.getSearchIdOrDescription();
		List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes();
		boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty();
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request);

		if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) {
			CurrentThreadCaptureQueriesListener.startCapturing();
		}
		if (matches.isEmpty()) {
			return new HashSet<>();
		}
		if (currentIncludes == null || currentIncludes.isEmpty()) {
			return new HashSet<>();
		}
		String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID;
		String searchPartitionIdFieldName =
				reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID;
		String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID;
		String findPartitionIdFieldName =
				reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID;
		String findResourceTypeFieldName = reverseMode ?
				MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE;
		String findVersionFieldName = null;
		if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) {
			findVersionFieldName = MY_TARGET_RESOURCE_VERSION;
		}

		List<JpaPid> nextRoundMatches = new ArrayList<>(matches);
		HashSet<JpaPid> allAdded = new HashSet<>();
		HashSet<JpaPid> original = new HashSet<>(matches);
		ArrayList<Include> includes = new ArrayList<>(currentIncludes);

		int roundCounts = 0;
		StopWatch w = new StopWatch();

		boolean addedSomeThisRound;
		do {
			roundCounts++;

			HashSet<JpaPid> pidsToInclude = new HashSet<>();

			for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) {
				Include nextInclude = iter.next();
				if (!nextInclude.isRecurse()) {
					iter.remove();
				}

				// Account for _include=*
				boolean matchAll = "*".equals(nextInclude.getValue());

				// Account for _include=[resourceType]:*
				String wantResourceType = null;
				if (!matchAll) {
					if ("*".equals(nextInclude.getParamName())) {
						wantResourceType = nextInclude.getParamType();
						matchAll = true;
					}
				}

				if (matchAll) {
					loadIncludesMatchAll(
							findPidFieldName,
							findPartitionIdFieldName,
							findResourceTypeFieldName,
							findVersionFieldName,
							searchPidFieldName,
							searchPartitionIdFieldName,
							wantResourceType,
							reverseMode,
							hasDesiredResourceTypes,
							nextRoundMatches,
							entityManager,
							maxCount,
							desiredResourceTypes,
							pidsToInclude,
							request);
				} else {
					loadIncludesMatchSpecific(
							nextInclude,
							fhirContext,
							findPidFieldName,
							findPartitionIdFieldName,
							findVersionFieldName,
							searchPidFieldName,
							searchPartitionIdFieldName,
							reverseMode,
							nextRoundMatches,
							entityManager,
							maxCount,
							pidsToInclude,
							request);
				}
			}

			nextRoundMatches.clear();
			for (JpaPid next : pidsToInclude) {
				if (!original.contains(next) && !allAdded.contains(next)) {
					nextRoundMatches.add(next);
				} else {
					ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next);
				}
			}

			addedSomeThisRound = allAdded.addAll(pidsToInclude);

			if (maxCount != null && allAdded.size() >= maxCount) {
				break;
			}

		} while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound);

		allAdded.removeAll(original);

		ourLog.info(
				"Loaded {} {} in {} rounds and {} ms for search {}",
				allAdded.size(),
				reverseMode ? "_revincludes" : "_includes",
				roundCounts,
				w.getMillisAndRestart(),
				searchIdOrDescription);

		if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) {
			callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster);
		}

		// Interceptor call: STORAGE_PREACCESS_RESOURCES
		// This can be used to remove results from the search result details before
		// the user has a chance to know that they were in the results
		if (!allAdded.isEmpty()) {

			if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) {
				List<JpaPid> includedPidList = new ArrayList<>(allAdded);
				JpaPreResourceAccessDetails accessDetails =
						new JpaPreResourceAccessDetails(includedPidList, () -> this);
				HookParams params = new HookParams()
						.add(IPreResourceAccessDetails.class, accessDetails)
						.add(RequestDetails.class, request)
						.addIfMatchesType(ServletRequestDetails.class, request);
				compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params);

				for (int i = includedPidList.size() - 1; i >= 0; i--) {
					if (accessDetails.isDontReturnResourceAtIndex(i)) {
						JpaPid value = includedPidList.remove(i);
						if (value != null) {
							allAdded.remove(value);
						}
					}
				}
			}
		}

		return allAdded;
	}

	private void loadIncludesMatchSpecific(
			Include nextInclude,
			FhirContext fhirContext,
			String findPidFieldName,
			String findPartitionFieldName,
			String findVersionFieldName,
			String searchPidFieldName,
			String searchPartitionFieldName,
			boolean reverseMode,
			List<JpaPid> nextRoundMatches,
			EntityManager entityManager,
			Integer maxCount,
			HashSet<JpaPid> pidsToInclude,
			RequestDetails theRequest) {
		List<String> paths;

		// Start replace
		RuntimeSearchParam param;
		String resType = nextInclude.getParamType();
		if (isBlank(resType)) {
			return;
		}
		RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType);
		if (def == null) {
			ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue());
			return;
		}

		String paramName = nextInclude.getParamName();
		if (isNotBlank(paramName)) {
			param = mySearchParamRegistry.getActiveSearchParam(
					resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		} else {
			param = null;
		}
		if (param == null) {
			ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue());
			return;
		}

		paths = param.getPathsSplitForResourceType(resType);
		// end replace

		Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param);

		for (String nextPath : paths) {
			String findPidFieldSqlColumn =
					findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id";
			String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS;
			if (findVersionFieldName != null) {
				fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS;
			}
			if (myPartitionSettings.isDatabasePartitionMode()) {
				fieldsToLoad += ", r.";
				fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID)
						? "partition_id"
						: "target_res_partition_id";
				fieldsToLoad += " as " + PARTITION_ID_ALIAS;
			}

			// Query for includes lookup has 2 cases
			// Case 1: Where target_resource_id is available in hfj_res_link table for local references
			// Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical
			// url in target_resource_url

			// Case 1:
			Map<String, Object> localReferenceQueryParams = new HashMap<>();

			String searchPidFieldSqlColumn =
					searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id";
			StringBuilder localReferenceQuery = new StringBuilder();
			localReferenceQuery.append("SELECT ").append(fieldsToLoad);
			localReferenceQuery.append(" FROM hfj_res_link r ");
			localReferenceQuery.append("WHERE r.src_path = :src_path");
			if (!"target_resource_id".equals(searchPidFieldSqlColumn)) {
				localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL");
			}
			localReferenceQuery
					.append(" AND r.")
					.append(searchPidFieldSqlColumn)
					.append(" IN (:target_pids) ");
			if (myPartitionSettings.isDatabasePartitionMode()) {
				String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID)
						? "target_res_partition_id"
						: "partition_id";
				localReferenceQuery
						.append("AND r.")
						.append(partitionFieldToSearch)
						.append(" = :search_partition_id ");
			}
			localReferenceQueryParams.put("src_path", nextPath);
			// we loop over target_pids later.
			if (targetResourceTypes != null) {
				if (targetResourceTypes.size() == 1) {
					localReferenceQuery.append("AND r.target_resource_type = :target_resource_type ");
					localReferenceQueryParams.put(
							"target_resource_type",
							targetResourceTypes.iterator().next());
				} else {
					localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) ");
					localReferenceQueryParams.put("target_resource_types", targetResourceTypes);
				}
			}

			// Case 2:
			Pair<String, Map<String, Object>> canonicalQuery =
					buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest);

			String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft();

			Map<String, Object> limitParams = new HashMap<>();
			if (maxCount != null) {
				LinkedList<Object> bindVariables = new LinkedList<>();
				sql = SearchQueryBuilder.applyLimitToSql(
						myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables);

				// The dialect SQL limiter uses positional params, but we're using
				// named params here, so we need to replace the positional params
				// with equivalent named ones
				StringBuilder sb = new StringBuilder();
				for (int i = 0; i < sql.length(); i++) {
					char nextChar = sql.charAt(i);
					if (nextChar == '?') {
						String nextName = "limit" + i;
						sb.append(':').append(nextName);
						limitParams.put(nextName, bindVariables.removeFirst());
					} else {
						sb.append(nextChar);
					}
				}
				sql = sb.toString();
			}

			List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize());
			for (Collection<JpaPid> nextPartition : partitions) {
				Query q = entityManager.createNativeQuery(sql, Tuple.class);
				q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
				if (myPartitionSettings.isDatabasePartitionMode()) {
					q.setParameter(
							"search_partition_id",
							nextPartition.iterator().next().getPartitionId());
				}
				localReferenceQueryParams.forEach(q::setParameter);
				canonicalQuery.getRight().forEach(q::setParameter);
				limitParams.forEach(q::setParameter);

				@SuppressWarnings("unchecked")
				List<Tuple> results = q.getResultList();
				for (Tuple result : results) {
					if (result != null) {
						Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS)));
						Long resourceVersion = null;
						if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) {
							resourceVersion =
									NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS)));
						}
						Integer partitionId = null;
						if (myPartitionSettings.isDatabasePartitionMode()) {
							partitionId = result.get(PARTITION_ID_ALIAS, Integer.class);
						}

						JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion);
						pid.setPartitionId(partitionId);
						pidsToInclude.add(pid);
					}
				}
			}
		}
	}

	private void loadIncludesMatchAll(
			String findPidFieldName,
			String findPartitionFieldName,
			String findResourceTypeFieldName,
			String findVersionFieldName,
			String searchPidFieldName,
			String searchPartitionFieldName,
			String wantResourceType,
			boolean reverseMode,
			boolean hasDesiredResourceTypes,
			List<JpaPid> nextRoundMatches,
			EntityManager entityManager,
			Integer maxCount,
			List<String> desiredResourceTypes,
			HashSet<JpaPid> pidsToInclude,
			RequestDetails request) {
		StringBuilder sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT r.").append(findPidFieldName);
		sqlBuilder.append(", r.").append(findResourceTypeFieldName);
		sqlBuilder.append(", r.myTargetResourceUrl");
		if (findVersionFieldName != null) {
			sqlBuilder.append(", r.").append(findVersionFieldName);
		}
		if (myPartitionSettings.isDatabasePartitionMode()) {
			sqlBuilder.append(", r.").append(findPartitionFieldName);
		}
		sqlBuilder.append(" FROM ResourceLink r WHERE ");

		if (myPartitionSettings.isDatabasePartitionMode()) {
			sqlBuilder.append("r.").append(searchPartitionFieldName);
			sqlBuilder.append(" = :target_partition_id AND ");
		}

		sqlBuilder.append("r.").append(searchPidFieldName);
		sqlBuilder.append(" IN (:target_pids)");

		/*
		 * We need to set the resource type in 2 cases only:
		 * 1) we are in $everything mode
		 * (where we only want to fetch specific resource types, regardless of what is
		 * available to fetch)
		 * 2) we are doing revincludes
		 *
		 * Technically if the request is a qualified star (e.g. _include=Observation:*) we
		 * should always be checking the source resource type on the resource link. We don't
		 * actually index that column though by default, so in order to try and be efficient
		 * we don't actually include it for includes (but we do for revincludes). This is
		 * because for an include, it doesn't really make sense to include a different
		 * resource type than the one you are searching on.
		 */
		if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) {
			// because mySourceResourceType is not part of the HFJ_RES_LINK
			// index, this might not be the most optimal performance.
			// but it is for an $everything operation (and maybe we should update the index)
			sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type");
		} else {
			wantResourceType = null;
		}

		// When calling $everything on a Patient instance, we don't want to recurse into new
		// Patient resources (e.g. via Provenance, List, or Group)
		if (myParams != null
				&& myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) {
			sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'");
			sqlBuilder.append(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE.stream()
					.collect(Collectors.joining("', '", " AND r.mySourceResourceType NOT IN ('", "')")));
		}
		if (hasDesiredResourceTypes) {
			sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)");
		}

		String sql = sqlBuilder.toString();
		List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize());
		for (Collection<JpaPid> nextPartition : partitions) {
			TypedQuery<?> q = entityManager.createQuery(sql, Object[].class);
			q.setParameter("target_pids", JpaPid.toLongList(nextPartition));
			if (myPartitionSettings.isDatabasePartitionMode()) {
				q.setParameter(
						"target_partition_id", nextPartition.iterator().next().getPartitionId());
			}
			if (wantResourceType != null) {
				q.setParameter("want_resource_type", wantResourceType);
			}
			if (maxCount != null) {
				q.setMaxResults(maxCount);
			}
			if (hasDesiredResourceTypes) {
				q.setParameter("desired_target_resource_types", desiredResourceTypes);
			}
			List<?> results = q.getResultList();
			Set<String> canonicalUrls = null;
			for (Object nextRow : results) {
				if (nextRow == null) {
					// This can happen if there are outgoing references which are canonical or point to
					// other servers
					continue;
				}

				Long version = null;
				Long resourceId = (Long) ((Object[]) nextRow)[0];
				String resourceType = (String) ((Object[]) nextRow)[1];
				String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2];
				Integer partitionId = null;
				int offset = 0;
				if (findVersionFieldName != null) {
					version = (Long) ((Object[]) nextRow)[3];
					offset++;
				}
				if (myPartitionSettings.isDatabasePartitionMode()) {
					partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]);
				}

				if (resourceId != null) {
					JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType);
					pid.setPartitionId(partitionId);
					pidsToInclude.add(pid);
				} else if (resourceCanonicalUrl != null) {
					if (canonicalUrls == null) {
						canonicalUrls = new HashSet<>();
					}
					canonicalUrls.add(resourceCanonicalUrl);
				}
			}
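			// Any canonical (URL-based) references collected above still need to be resolved
			// via the URI index; that extra lookup happens below and is comparatively costly,
			// which is why a performance warning is fired first.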
			if (canonicalUrls != null) {
				String message =
						"Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead.";
				firePerformanceWarning(request, message);
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Fires the performance-trace pointcut for raw SQL, passing it the SQL queries captured on
	 * the current thread.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = nextInclude.getParamTargetType();
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
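			// (Returning null means the local-reference query adds no target type restriction,
			// and the canonical-URL path will hash all candidate types instead.)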
			targetResourceTypes = null;
		}
		return targetResourceTypes;
	}

	@Nonnull
	private Pair<String, Map<String, Object>> buildCanonicalUrlQuery(
			String theVersionFieldName,
			Set<String> theTargetResourceTypes,
			boolean theReverse,
			RequestDetails theRequest) {
		String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id";
		if (theVersionFieldName != null) {
			// canonical-uri references aren't versioned, but we need to match the column count for the UNION
			fieldsToLoadFromSpidxUriTable += ", NULL";
		}

		if (myPartitionSettings.isDatabasePartitionMode()) {
			if (theReverse) {
				fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS;
			} else {
				fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS;
			}
		}

		// The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url.
		// But sp_name isn't indexed, so we use hash_identity instead.
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse);

		Map<String, Object> canonicalUriQueryParams = new HashMap<>();
		StringBuilder canonicalUrlQuery = new StringBuilder();
		canonicalUrlQuery
				.append("SELECT ")
				.append(fieldsToLoadFromSpidxUriTable)
				.append(' ');
		canonicalUrlQuery.append("FROM hfj_res_link r ");

		// join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2
		canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON (");
		if (myPartitionSettings.isDatabasePartitionMode()) {
			canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND ");
			canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds);
		}
		if (canonicalUrlTargets.myHashIdentityValues.size() == 1) {
			canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash");
			canonicalUriQueryParams.put(
					"uri_identity_hash",
					canonicalUrlTargets.myHashIdentityValues.iterator().next());
		} else {
			canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)");
			canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues);
		}
		canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri");
		canonicalUrlQuery.append(")");

		canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND");
		canonicalUrlQuery.append(" r.target_resource_id IS NULL");
		canonicalUrlQuery.append(" AND");
		if (myPartitionSettings.isDatabasePartitionMode()) {
			if (theReverse) {
				canonicalUrlQuery.append(" rUri.partition_id");
			} else {
				canonicalUrlQuery.append(" r.partition_id");
			}
			canonicalUrlQuery.append(" = :search_partition_id");
			canonicalUrlQuery.append(" AND");
		}
		if (theReverse) {
			canonicalUrlQuery.append(" rUri.res_id");
		} else {
			canonicalUrlQuery.append(" r.src_resource_id");
		}
		canonicalUrlQuery.append(" IN (:target_pids)");

		return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams);
	}

	@Nonnull
	CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes(
			RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) {
		Set<String> targetResourceTypes = theTargetResourceTypes;
		if (targetResourceTypes == null) {
			/*
			 * If we don't have a
			 * list of valid target types, we need to figure out a list of all
			 * possible target types in order to perform the search of the URI index table. This is
			 * because the hash_identity column encodes the resource type, so we'll need a hash
			 * value for each possible target type.
			 */
			targetResourceTypes = new HashSet<>();
			Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes();
			if (theReverse) {
				// For reverse includes, it is really hard to figure out what types
				// are actually potentially pointing to the type we're searching for
				// in this context, so let's just assume it could be anything.
				targetResourceTypes = possibleTypes;
			} else {
				for (var next : mySearchParamRegistry
						.getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH)
						.values()
						.stream()
						.filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE))
						.collect(Collectors.toList())) {

					// If the reference points to a Reference (ie not a canonical or CanonicalReference)
					// then it doesn't matter here anyhow. The logic here only works for elements at the
					// root level of the document (e.g. QuestionnaireResponse.subject or
					// QuestionnaireResponse.subject.where(...)) but this is just an optimization
					// anyhow.
					if (next.getPath().startsWith(myResourceName + ".")) {
						String elementName =
								next.getPath().substring(next.getPath().indexOf('.') + 1);
						int secondDotIndex = elementName.indexOf('.');
						if (secondDotIndex != -1) {
							elementName = elementName.substring(0, secondDotIndex);
						}
						BaseRuntimeChildDefinition child =
								myContext.getResourceDefinition(myResourceName).getChildByName(elementName);
						if (child != null) {
							BaseRuntimeElementDefinition<?> childDef = child.getChildByName(elementName);
							if (childDef != null) {
								if (childDef.getName().equals("Reference")) {
									continue;
								}
							}
						}
					}

					if (!next.getTargets().isEmpty()) {
						// For each reference parameter on the resource type we're searching for,
						// add all the potential target types to the list of possible target
						// resource types we can look up.
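						// (Only types that actually have a registered DAO are kept below, since
						// any other type cannot exist in this repository anyway.)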
						for (var nextTarget : next.getTargets()) {
							if (possibleTypes.contains(nextTarget)) {
								targetResourceTypes.add(nextTarget);
							}
						}
					} else {
						// If we have any references that don't define any target types, then
						// we need to assume that all enabled resource types are possible target
						// types
						targetResourceTypes.addAll(possibleTypes);
						break;
					}
				}
			}
		}
		assert !targetResourceTypes.isEmpty();

		Set<Long> hashIdentityValues = new HashSet<>();
		Set<Integer> partitionIds = new HashSet<>();
		for (String type : targetResourceTypes) {

			RequestPartitionId readPartition;
			if (myPartitionSettings.isPartitioningEnabled()) {
				readPartition =
						myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type);
			} else {
				readPartition = RequestPartitionId.defaultPartition();
			}
			if (readPartition.hasPartitionIds()) {
				partitionIds.addAll(readPartition.getPartitionIds());
			}

			Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(
					myPartitionSettings, readPartition, type, "url");
			hashIdentityValues.add(hashIdentity);
		}

		return new CanonicalUrlTargets(hashIdentityValues, partitionIds);
	}

	static class CanonicalUrlTargets {

		@Nonnull
		final Set<Long> myHashIdentityValues;

		@Nonnull
		final Set<Integer> myPartitionIds;

		public CanonicalUrlTargets(@Nonnull Set<Long> theHashIdentityValues, @Nonnull Set<Integer> thePartitionIds) {
			myHashIdentityValues = theHashIdentityValues;
			myPartitionIds = thePartitionIds;
		}
	}

	/**
	 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing
	 * those pids where:
	 * <ul>
	 * <li>No single list is more than {@literal theMaxLoad} entries</li>
	 * <li>Each list only contains JpaPids with the same partition ID</li>
	 * </ul>
	 */
	static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) {

		if (theNextRoundMatches.size() <= theMaxLoad) {
			boolean allSamePartition = true;
			for (int i = 1; i < theNextRoundMatches.size(); i++) {
				if (!Objects.equals(
						theNextRoundMatches.get(i - 1).getPartitionId(),
						theNextRoundMatches.get(i).getPartitionId())) {
					allSamePartition = false;
					break;
				}
			}
			if (allSamePartition) {
				return Collections.singletonList(theNextRoundMatches);
			}
		}

		// Break into partitioned sublists
		ListMultimap<String, JpaPid> lists =
				MultimapBuilder.hashKeys().arrayListValues().build();
		for (JpaPid nextRoundMatch : theNextRoundMatches) {
			String partitionId = nextRoundMatch.getPartitionId() != null ?
					nextRoundMatch.getPartitionId().toString()
					: "";
			lists.put(partitionId, nextRoundMatch);
		}

		List<Collection<JpaPid>> retVal = new ArrayList<>();
		for (String key : lists.keySet()) {
			List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad);
			retVal.addAll(nextPartition);
		}

		// In unit test mode, we sort the results just for unit test predictability
		if (HapiSystemProperties.isUnitTestModeEnabled()) {
			retVal = retVal.stream()
					.map(t -> t.stream().sorted().collect(Collectors.toList()))
					.collect(Collectors.toList());
		}

		return retVal;
	}

	private void attemptComboUniqueSpProcessing(
			QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) {
		RuntimeSearchParam comboParam = null;
		List<String> comboParamNames = null;
		List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams(
				myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
		if (!exactMatchParams.isEmpty()) {
			comboParam = exactMatchParams.get(0);
			comboParamNames = new ArrayList<>(theParams.keySet());
		}

		if (comboParam == null) {
			List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			for (RuntimeSearchParam nextCandidate : candidateComboParams) {
				List<String> nextCandidateParamNames =
						JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream()
								.map(RuntimeSearchParam::getName)
								.collect(Collectors.toList());
				if (theParams.keySet().containsAll(nextCandidateParamNames)) {
					comboParam = nextCandidate;
					comboParamNames = nextCandidateParamNames;
					break;
				}
			}
		}

		if (comboParam != null) {
			Collections.sort(comboParamNames);

			// Since we're going to remove elements below
			theParams.values().forEach(this::ensureSubListsAreWritable);

			/*
			 * Apply search against the combo param index in a loop:
			 *
			 * 1. First we check whether the actual parameter values in the
			 * parameter map are actually usable for searching against the combo
			 * param index. E.g. no search modifiers, date comparators, etc.,
			 * since these mean you can't use the combo index.
			 *
			 * 2. Apply and create the join SQL. We remove parameter values from
			 * the map as we apply them, so any parameter values remaining in the
			 * map after each loop haven't yet been factored into the SQL.
			 *
			 * The loop allows us to create multiple combo index joins if there
			 * are multiple AND expressions for the related parameters.
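			 *
			 * For example (illustrative values only): given
			 * date=2021-01-01&date=2021-02-01&family=Smith and a combo param defined on
			 * (date, family), the loop below runs twice, adding one combo-index join per
			 * date AND clause.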
			 */
			while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) {
				applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam);
			}
		}
	}

	private void applyComboSearchParam(
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest,
			List<String> theComboParamNames,
			RuntimeSearchParam theComboParam) {

		List<List<IQueryParameterType>> inputs = new ArrayList<>();
		for (String nextParamName : theComboParamNames) {
			List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0);
			inputs.add(nextValues);
		}

		List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs);
		List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs));
		for (List<IQueryParameterType> nextPermutation : inputPermutations) {

			StringBuilder searchStringBuilder = new StringBuilder();
			searchStringBuilder.append(myResourceName);
			searchStringBuilder.append("?");

			boolean first = true;
			for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) {

				String nextParamName = theComboParamNames.get(paramIndex);
				IQueryParameterType nextOr = nextPermutation.get(paramIndex);
				// The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam).
				// As a result, we strip the prefix if present.
				String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue());

				RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
				if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) {
					if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) {
						nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue);
					}
				}

				if (first) {
					first = false;
				} else {
					searchStringBuilder.append('&');
				}

				nextParamName = UrlUtil.escapeUrlParam(nextParamName);
				nextOrValue = UrlUtil.escapeUrlParam(nextOrValue);

				searchStringBuilder.append(nextParamName).append('=').append(nextOrValue);
			}

			String indexString = searchStringBuilder.toString();
			ourLog.debug(
					"Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString);

			indexStrings.add(indexString);
		}

		// Just to make sure we're stable for tests
		indexStrings.sort(Comparator.naturalOrder());

		// Interceptor broadcast: JPA_PERFTRACE_INFO
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) {
			String indexStringForLog = indexStrings.size() > 1 ?
					indexStrings.toString() : indexStrings.get(0);
			StorageProcessingMessage msg = new StorageProcessingMessage()
					.setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: "
							+ indexStringForLog);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, msg);
			compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params);
		}

		switch (requireNonNull(theComboParam.getComboSearchParamType())) {
			case UNIQUE:
				theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId);
				break;
			case NON_UNIQUE:
				theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId);
				break;
		}

		// Remove any empty parameters remaining after this
		theParams.clean();
	}

	/**
	 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for
	 * searching against a combo param with the given parameter names. This might be {@literal false} if
	 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes
	 * (e.g. <code>?date=gt2024-02-01</code>), etc.
	 */
	private boolean validateParamValuesAreValidForComboParam(
			RequestDetails theRequest,
			@Nonnull SearchParameterMap theParams,
			List<String> theComboParamNames,
			RuntimeSearchParam theComboParam) {
		boolean paramValuesAreValidForCombo = true;
		List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size());

		for (String nextParamName : theComboParamNames) {
			List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName);

			if (nextValues == null || nextValues.isEmpty()) {
				paramValuesAreValidForCombo = false;
				break;
			}

			List<IQueryParameterType> nextAndValue = nextValues.get(0);
			paramOrValues.add(nextAndValue);

			for (IQueryParameterType nextOrValue : nextAndValue) {
				if (nextOrValue instanceof DateParam dateParam) {
					if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) {
						String message = "Search with params " + theComboParamNames
								+ " is not a candidate for combo searching - Date search with non-DAY precision for parameter '"
								+ nextParamName + "'";
						firePerformanceInfo(theRequest, message);
						paramValuesAreValidForCombo = false;
						break;
					}
				}
				if (nextOrValue instanceof BaseParamWithPrefix<?> paramWithPrefix) {
					ParamPrefixEnum prefix = paramWithPrefix.getPrefix();
					// A parameter with the 'eq' prefix is the only accepted prefix when combo searching since
					// birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches.
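					// Any other prefix (gt, ge, lt, le, ne, ...) changes the match semantics and
					// cannot be answered from the precomputed combo index string.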
					if (prefix != null && prefix != EQUAL) {
						String message = "Search with params " + theComboParamNames
								+ " is not a candidate for combo searching - Parameter '" + nextParamName
								+ "' has prefix: '"
								+ paramWithPrefix.getPrefix().getValue() + "'";
						firePerformanceInfo(theRequest, message);
						paramValuesAreValidForCombo = false;
						break;
					}
				}
				if (isNotBlank(nextOrValue.getQueryParameterQualifier())) {
					String message = "Search with params " + theComboParamNames
							+ " is not a candidate for combo searching - Parameter '" + nextParamName
							+ "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'";
					firePerformanceInfo(theRequest, message);
					paramValuesAreValidForCombo = false;
					break;
				}
			}

			// Reference params are only eligible for using a composite index if they
			// are qualified
			RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
				ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0);
				if (isBlank(param.getResourceType())) {
					ourLog.debug(
							"Search is not a candidate for unique combo searching - Reference with no type specified");
					paramValuesAreValidForCombo = false;
					break;
				}
			}

			// Date params are not eligible for using composite unique index
			// as index could contain date with different precision (e.g. DAY, SECOND)
			if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE
					&& theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) {
				ourLog.debug(
						"Search with params {} is not a candidate for combo searching - "
								+ "Unique combo search parameter '{}' has DATE type",
						theComboParamNames,
						nextParamName);
				paramValuesAreValidForCombo = false;
				break;
			}
		}

		if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) {
			ourLog.debug(
					"Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations");
			paramValuesAreValidForCombo = false;
		}

		return paramValuesAreValidForCombo;
	}

	private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) {
		for (int i = 0; i < theListOfLists.size(); i++) {
			List<T> oldSubList = theListOfLists.get(i);
			if (!(oldSubList instanceof ArrayList)) {
				List<T> newSubList = new ArrayList<>(oldSubList);
				theListOfLists.set(i, newSubList);
			}
		}
	}

	@Override
	public void setFetchSize(int theFetchSize) {
		myFetchSize = theFetchSize;
	}

	public SearchParameterMap getParams() {
		return myParams;
	}

	public CriteriaBuilder getBuilder() {
		return myCriteriaBuilder;
	}

	public Class<?
			extends IBaseResource> getResourceType() {
		return myResourceType;
	}

	public String getResourceName() {
		return myResourceName;
	}

	/**
	 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs
	 */
	public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> {

		private final RequestDetails myRequest;
		private final Set<JpaPid> myCurrentPids;
		private Iterator<JpaPid> myCurrentIterator;
		private JpaPid myNext;

		IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) {
			myCurrentPids = new HashSet<>(thePidSet);
			myCurrentIterator = null;
			myRequest = theRequest;
		}

		private void fetchNext() {
			while (myNext == null) {

				if (myCurrentIterator == null) {
					Set<Include> includes = new HashSet<>();
					if (myParams.containsKey(Constants.PARAM_TYPE)) {
						for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) {
							for (IQueryParameterType type : typeList) {
								String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext));
								for (String resourceType : queryString.split(",")) {
									String rt = resourceType.trim();
									if (isNotBlank(rt)) {
										includes.add(new Include(rt + ":*", true));
									}
								}
							}
						}
					}
					if (includes.isEmpty()) {
						includes.add(new Include("*", true));
					}
					Set<JpaPid> newPids = loadIncludes(
							myContext,
							myEntityManager,
							myCurrentPids,
							includes,
							false,
							getParams().getLastUpdated(),
							mySearchUuid,
							myRequest,
							null);
					myCurrentIterator = newPids.iterator();
				}

				if (myCurrentIterator.hasNext()) {
					myNext = myCurrentIterator.next();
				} else {
					myNext = NO_MORE;
				}
			}
		}

		@Override
		public boolean hasNext() {
			fetchNext();
			return !NO_MORE.equals(myNext);
		}

		@Override
		public JpaPid next() {
			fetchNext();
			JpaPid retVal = myNext;
			myNext = null;
			return retVal;
		}
	}

	/**
	 * Basic Query iterator, used to fetch the results of a query.
	 */
	private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> {

		private final SearchRuntimeDetails mySearchRuntimeDetails;
		private final RequestDetails myRequest;
		private final boolean myHaveRawSqlHooks;
		private final boolean myHavePerfTraceFoundIdHook;
		private final SortSpec mySort;
		private final Integer myOffset;
		private final IInterceptorBroadcaster myCompositeBroadcaster;
		private boolean myFirst = true;
		private IncludesIterator myIncludesIterator;
		/**
		 * The JpaPid of the next result in this query.
		 * Will not be null if fetched using getNext()
		 */
		private JpaPid myNext;
		/**
		 * The current query result iterator running sql and supplying PIDs
		 * @see #myQueryList
		 */
		private ISearchQueryExecutor myResultsIterator;

		private boolean myFetchIncludesForEverythingOperation;
		/**
		 * The count of resources skipped because they were seen in earlier results
		 */
		private int mySkipCount = 0;
		/**
		 * The count of resources that are new in this search
		 * (ie, not cached in previous searches)
		 */
		private int myNonSkipCount = 0;

		/**
		 * The list of queries to use to find all results.
		 * Normal JPA queries will normally have a single entry.
		 * Queries that involve Hibernate Search/Elasticsearch may have
		 * multiple queries because of chunking.
		 * The $everything operation also jams some extra results in.
		 */
		private List<ISearchQueryExecutor> myQueryList = new ArrayList<>();

		private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
			mySearchRuntimeDetails = theSearchRuntimeDetails;
			mySort = myParams.getSort();
			myOffset = myParams.getOffset();
			myRequest = theRequest;
			myCompositeBroadcaster =
					CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);

			// everything requires fetching recursively all related resources
			if (myParams.getEverythingMode() != null) {
				myFetchIncludesForEverythingOperation = true;
			}

			myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID);
			myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL);
		}

		private void fetchNext() {
			try {
				if (myHaveRawSqlHooks) {
					CurrentThreadCaptureQueriesListener.startCapturing();
				}

				// If we don't have a query yet, create one
				if (myResultsIterator == null) {
					if (!mySearchProperties.hasMaxResultsRequested()) {
						mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
					}

					/*
					 * assigns the results iterator
					 * and populates the myQueryList.
					 */
					initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
				}

				if (myNext == null) {
					// no next means we need a new query (if one is available)
					while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
						/*
						 * Because we combine our DB searches with Lucene
						 * sometimes we can have multiple results iterators
						 * (with only some having data in them to extract).
						 *
						 * We'll iterate our results iterators until we
						 * either run out of results iterators, or we
						 * have one that actually has data in it.
						 */
						while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) {
							retrieveNextIteratorQuery();
						}

						if (!myResultsIterator.hasNext()) {
							// we couldn't find a results iterator;
							// we're done here
							break;
						}

						JpaPid nextPid = myResultsIterator.next();
						if (myHavePerfTraceFoundIdHook) {
							callPerformanceTracingHook(nextPid);
						}

						if (nextPid != null) {
							if (!myPidSet.contains(nextPid)) {
								if (!mySearchProperties.isDeduplicateInDatabase()) {
									/*
									 * We only add to the map if we aren't fetching "everything";
									 * otherwise, we let the de-duplication happen in the database
									 * (see createChunkedQueryNormalSearch above), because it
									 * saves memory that way.
									 */
									myPidSet.add(nextPid);
								}
								if (doNotSkipNextPidForEverything()) {
									myNext = nextPid;
									myNonSkipCount++;
									break;
								}
							} else {
								mySkipCount++;
							}
						}

						if (!myResultsIterator.hasNext()) {
							if (mySearchProperties.hasMaxResultsRequested()
									&& (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) {
								if (mySkipCount > 0 && myNonSkipCount == 0) {
									sendProcessingMsgAndFirePerformanceHook();
									// need the next iterator; increase the maxsize
									// (we should always do this)
									int maxResults = mySearchProperties.getMaxResultsRequested() + 1000;
									mySearchProperties.setMaxResultsRequested(maxResults);

									if (!mySearchProperties.isDeduplicateInDatabase()) {
										// if we're not using the database to deduplicate
										// we should recheck our memory usage
										// the prefetch size check is future proofing
										int prefetchSize = myStorageSettings
												.getSearchPreFetchThresholds()
												.size();
										if (prefetchSize > 0) {
											if (myStorageSettings
															.getSearchPreFetchThresholds()
															.get(prefetchSize - 1)
													< mySearchProperties.getMaxResultsRequested()) {
												mySearchProperties.setDeduplicateInDatabase(true);
											}
										}
									}

									initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
								}
							}
						}
					}
				}

				if (myNext == null) {
					// if we got here, it means the current JpaPid has already been processed,
					// and we will decide (here) if we need to fetch related resources recursively
					if (myFetchIncludesForEverythingOperation) {
						myIncludesIterator = new IncludesIterator(myPidSet, myRequest);
						myFetchIncludesForEverythingOperation = false;
					}
					if (myIncludesIterator != null) {
						while (myIncludesIterator.hasNext()) {
							JpaPid next = myIncludesIterator.next();
							if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) {
								myNext = next;
								break;
							}
						}
						if (myNext == null) {
							myNext = NO_MORE;
						}
					} else {
						myNext = NO_MORE;
					}
				}

				if (!mySearchProperties.hasMaxResultsRequested()) {
					mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount);
				} else {
					mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size());
				}

			} finally {
				// search finished - fire hooks
				if (myHaveRawSqlHooks) {
					callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster);
				}
			}

			if (myFirst) {
				HookParams params = new HookParams()
						.add(RequestDetails.class, myRequest)
						.addIfMatchesType(ServletRequestDetails.class, myRequest)
						.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
				myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params);
				myFirst = false;
			}

			if (NO_MORE.equals(myNext)) {
				HookParams params = new HookParams()
						.add(RequestDetails.class, myRequest)
						.addIfMatchesType(ServletRequestDetails.class, myRequest)
						.add(SearchRuntimeDetails.class, mySearchRuntimeDetails);
				myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params);
			}
		}

		private Integer calculateMaxResultsToFetch() {
			if (myParams.getLoadSynchronousUpTo() != null) {
				return myParams.getLoadSynchronousUpTo();
			} else if (myParams.getOffset() != null && myParams.getCount() != null) {
				return myParams.getEverythingMode() != null ?
						myParams.getOffset() + myParams.getCount()
						: myParams.getCount();
			} else {
				return myStorageSettings.getFetchSizeDefaultMaximum();
			}
		}

		private boolean doNotSkipNextPidForEverything() {
			return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size()));
		}

		private void callPerformanceTracingHook(JpaPid theNextPid) {
			HookParams params = new HookParams()
					.add(Integer.class, System.identityHashCode(this))
					.add(Object.class, theNextPid);
			myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params);
		}

		private void sendProcessingMsgAndFirePerformanceHook() {
			String msg = "Pass completed with no matching results seeking rows "
					+ myPidSet.size() + "-" + mySkipCount
					+ ". This indicates an inefficient query! Retrying with new max count of "
					+ mySearchProperties.getMaxResultsRequested();
			firePerformanceWarning(myRequest, msg);
		}

		private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) {
			Integer offset = theOffset;
			if (myQueryList.isEmpty()) {
				// Capture times for Lucene/Elasticsearch queries as well
				mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

				// setting offset to 0 to fetch all resource ids to guarantee
				// correct output result for everything operation during paging
				if (myParams.getEverythingMode() != null) {
					offset = 0;
				}

				SearchQueryProperties properties = mySearchProperties.clone();
				properties
						.setOffset(offset)
						.setMaxResultsRequested(theMaxResultsToFetch)
						.setDoCountOnlyFlag(false)
						.setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null);
				myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails);
			}

			mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());

			retrieveNextIteratorQuery();

			mySkipCount = 0;
			myNonSkipCount = 0;
		}

		private void retrieveNextIteratorQuery() {
			close();
			if (isNotEmpty(myQueryList)) {
				myResultsIterator = myQueryList.remove(0);
				myHasNextIteratorQuery = true;
			} else {
				myResultsIterator = SearchQueryExecutor.emptyExecutor();
				myHasNextIteratorQuery = false;
			}
		}

		@Override
		public boolean hasNext() {
			if (myNext == null) {
				fetchNext();
			}
			return !NO_MORE.equals(myNext);
		}

		@Override
		public JpaPid next() {
			fetchNext();
			JpaPid retVal = myNext;
			myNext = null;
			Validate.isTrue(!NO_MORE.equals(retVal), "No more elements");
			return retVal;
		}

		@Override
		public int getSkippedCount() {
			return mySkipCount;
		}

		@Override
		public int getNonSkippedCount() {
			return myNonSkipCount;
		}

		@Override
		public Collection<JpaPid> getNextResultBatch(long theBatchSize) {
			Collection<JpaPid> batch = new ArrayList<>();
			while (this.hasNext() && batch.size() < theBatchSize) {
				batch.add(this.next());
			}
			return batch;
		}

		@Override
		public void close() {
			if (myResultsIterator != null) {
				myResultsIterator.close();
			}
			myResultsIterator = null;
		}
	}
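	/**
	 * Logs the given message at DEBUG and forwards it to the {@link Pointcut#JPA_PERFTRACE_INFO}
	 * pointcut so interceptors can surface it for the specific request being executed.
	 */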
	private void firePerformanceInfo(RequestDetails theRequest, String theMessage) {
		// Only log at debug level since these messages aren't considered important enough
		// that we should be cluttering the system log, but they are important to the
		// specific query being executed, so we'll surface them there at INFO level
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}
}