
/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2025 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ComboSearchParamType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dao.JpaPidFk;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CartesianProductUtil;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.stripStart;

/**
 * The SearchBuilder is responsible for actually forming the SQL query that handles
 * searches for resources
 */
public class SearchBuilder implements ISearchBuilder<JpaPid> {

	/**
	 * See loadResourcesByPid
	 * for an explanation of why we use the constant 800
	 */
	// NB: keep public
	@Deprecated
	public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;

	public static final String RESOURCE_ID_ALIAS = "resource_id";
	public static final String PARTITION_ID_ALIAS = "partition_id";
	public static final String RESOURCE_VERSION_ALIAS = "resource_version";
	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
	private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
	private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
	private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
	private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
	private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
	private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
	private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
	private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
	public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
	public static boolean myUseMaxPageSize50ForTest = false;
	public static Integer myMaxPageSizeForTests = null;
	protected final IInterceptorBroadcaster myInterceptorBroadcaster;
	protected final IResourceTagDao myResourceTagDao;
	private String myResourceName;
	private final Class<? extends IBaseResource> myResourceType;
	private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
	private final SqlObjectFactory mySqlBuilderFactory;
	private final HibernatePropertiesProvider myDialectProvider;
	private final ISearchParamRegistry mySearchParamRegistry;
	private final PartitionSettings myPartitionSettings;
	private final DaoRegistry myDaoRegistry;
	private final FhirContext myContext;
	private final IIdHelperService<JpaPid> myIdHelperService;
	private final JpaStorageSettings myStorageSettings;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

	private CriteriaBuilder myCriteriaBuilder;
	private SearchParameterMap myParams;
	private String mySearchUuid;
	private int myFetchSize;

	private boolean myRequiresTotal;

	/**
	 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
	 */
	private Set<JpaPid> myPidSet;

	private boolean myHasNextIteratorQuery = false;
	private RequestPartitionId myRequestPartitionId;

	private SearchQueryProperties mySearchProperties;

	private IFulltextSearchSvc myFulltextSearchSvc;

	@Autowired(required = false)
	public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) {
		myFulltextSearchSvc = theFulltextSearchSvc;
	}

	private IResourceHistoryTableDao myResourceHistoryTableDao;

	private IJpaStorageResourceParser myJpaStorageResourceParser;

	@Autowired(required = false)
	private IElasticsearchSvc myIElasticsearchSvc;

	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;

	@Autowired
	private IRequestPartitionHelperSvc myPartitionHelperSvc;

	/**
	 * Constructor
	 */
	@SuppressWarnings({"rawtypes", "unchecked"})
	public SearchBuilder(
			String theResourceName,
			JpaStorageSettings theStorageSettings,
			HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider,
			ISearchParamRegistry theSearchParamRegistry,
			PartitionSettings thePartitionSettings,
			IInterceptorBroadcaster theInterceptorBroadcaster,
			IResourceTagDao theResourceTagDao,
			DaoRegistry theDaoRegistry,
			FhirContext theContext,
			IIdHelperService theIdHelperService,
			IResourceHistoryTableDao theResourceHistoryTagDao,
			IJpaStorageResourceParser theIJpaStorageResourceParser,
			Class<? extends IBaseResource> theResourceType) {
		myResourceName = theResourceName;
		myResourceType = theResourceType;
		myStorageSettings = theStorageSettings;

		myEntityManagerFactory = theEntityManagerFactory;
		mySqlBuilderFactory = theSqlBuilderFactory;
		myDialectProvider = theDialectProvider;
		mySearchParamRegistry = theSearchParamRegistry;
		myPartitionSettings = thePartitionSettings;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
		myResourceTagDao = theResourceTagDao;
		myDaoRegistry = theDaoRegistry;
		myContext = theContext;
		myIdHelperService = theIdHelperService;
		myResourceHistoryTableDao = theResourceHistoryTagDao;
		myJpaStorageResourceParser = theIJpaStorageResourceParser;

		mySearchProperties = new SearchQueryProperties();
	}

	@VisibleForTesting
	void setResourceName(String theName) {
		myResourceName = theName;
	}

	@Override
	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
		mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
	}

	@Override
	public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
		mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
	}

	@Override
	public void setRequireTotal(boolean theRequireTotal) {
		myRequiresTotal = theRequireTotal;
	}

	@Override
	public boolean requiresTotal() {
		return myRequiresTotal;
	}

	private void searchForIdsWithAndOr(
			SearchQueryBuilder theSearchSqlBuilder,
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest) {
		myParams = theParams;
		mySearchProperties.setSortSpec(myParams.getSort());

		// Remove any empty parameters
		theParams.clean();

		// For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance
		if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
			Dstu3DistanceHelper.setNearDistance(myResourceType, theParams);
		}

		// Attempt to lookup via composite unique key.
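		// (Only attempted when unique indexes are enabled and this is not an $everything search;
		// see isCompositeUniqueSpCandidate() below.)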
		if (isCompositeUniqueSpCandidate()) {
			attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
		}

		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream()
				.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
				.filter(t -> !t.equals(Constants.PARAM_TAG))
				.collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {
			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by
				// Elasticsearch
				continue;
			}
			List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
			Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
					.setParamName(nextParamName)
					.setAndOrParams(andOrParams)
					.setRequest(theRequest)
					.setRequestPartitionId(myRequestPartitionId));
			if (predicate != null) {
				theSearchSqlBuilder.addPredicate(predicate);
			}
		}
	}

	/**
	 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the
	 * parameters all have no modifiers.
	 */
	private boolean isCompositeUniqueSpCandidate() {
		return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null;
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public Long createCountQuery(
			SearchParameterMap theParams,
			String theSearchUuid,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {

		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchUuid, theRequestPartitionId);

		if (checkUseHibernateSearch()) {
			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
		}

		SearchQueryProperties properties = mySearchProperties.clone();
		properties.setDoCountOnlyFlag(true);
		properties.setSortSpec(null); // counts don't require sorts
		properties.setMaxResultsRequested(null);
		properties.setOffset(null);
		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
		if (queries.isEmpty()) {
			return 0L;
		} else {
			JpaPid jpaPid = queries.get(0).next();
			return jpaPid.getId();
		}
	}

	/**
	 * @param thePidSet May be null
	 */
	@Override
	public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) {
		myPidSet = new HashSet<>(thePidSet);
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public IResultIterator<JpaPid> createQuery(
			SearchParameterMap theParams,
			SearchRuntimeDetails theSearchRuntimeDetails,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {
		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);

		if (myPidSet == null) {
			myPidSet = new HashSet<>();
		}

		return new QueryIterator(theSearchRuntimeDetails, theRequest);
	}

	private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
		myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
		// we mutate the params. Make a private copy.
		myParams = theParams.clone();
		mySearchProperties.setSortSpec(myParams.getSort());
		mySearchUuid = theSearchUuid;
		myRequestPartitionId = theRequestPartitionId;
	}

	/**
	 * The query created can be either a count query or the
	 * actual query.
	 * This is why it takes a SearchQueryProperties object
	 * (and doesn't use the local version of it).
	 * The properties may differ slightly for whichever
	 * query this is.
	 */
	private List<ISearchQueryExecutor> createQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			SearchRuntimeDetails theSearchRuntimeDetails) {
		ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

		if (checkUseHibernateSearch()) {
			// we're going to run at least part of the search against the Fulltext service.

			// Ugh - we have two different return types for now
			ISearchQueryExecutor fulltextExecutor = null;
			List<JpaPid> fulltextMatchIds = null;
			int resultCount = 0;
			if (myParams.isLastN()) {
				fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested());
				resultCount = fulltextMatchIds.size();
			} else if (myParams.getEverythingMode() != null) {
				fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
				resultCount = fulltextMatchIds.size();
			} else {
				// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
				// enabled SP indexing).
				// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
				// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
				// extra work in Elastic.
				// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

				// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
				// a page.
				fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
			}

			if (fulltextExecutor == null) {
				fulltextExecutor =
						SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
			}

			if (theSearchRuntimeDetails != null) {
				theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
				IInterceptorBroadcaster compositeBroadcaster =
						CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
				if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
					HookParams params = new HookParams()
							.add(RequestDetails.class, theRequest)
							.addIfMatchesType(ServletRequestDetails.class, theRequest)
							.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
					compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
				}
			}

			// can we skip the database entirely and return the pid list from here?
			boolean canSkipDatabase =
					// if we processed an AND clause, and it returned nothing, then nothing can match.
					!fulltextExecutor.hasNext()
							||
							// Our hibernate search query doesn't respect partitions yet
							(!myPartitionSettings.isPartitioningEnabled()
									&&
									// were there AND terms left? Then we still need the db.
									theParams.isEmpty()
									&&
									// not every param is a param. :-(
									theParams.getNearDistanceParam() == null
									&&
									// todo MB don't we support _lastUpdated and _offset now?
									theParams.getLastUpdated() == null
									&& theParams.getEverythingMode() == null
									&& theParams.getOffset() == null);

			if (canSkipDatabase) {
				ourLog.trace("Query finished after HSearch. Skip db query phase");
				if (theSearchProperties.hasMaxResultsRequested()) {
					fulltextExecutor = SearchQueryExecutors.limited(
							fulltextExecutor, theSearchProperties.getMaxResultsRequested());
				}
				queries.add(fulltextExecutor);
			} else {
				ourLog.trace("Query needs db after HSearch. Chunking.");
				// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
				// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
				new QueryChunker<JpaPid>()
						.chunk(
								fulltextExecutor,
								SearchBuilder.getMaximumPageSize(),
								// for each list of (SearchBuilder.getMaximumPageSize())
								// we create a chunked query and add it to 'queries'
								t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
			}
		} else {
			// do everything in the database.
			createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
		}

		return queries;
	}

	/**
	 * Check to see if query should use Hibernate Search, and error if the query can't continue.
	 *
	 * @return true if the query should first be processed by Hibernate Search
	 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text
	 */
	private boolean checkUseHibernateSearch() {
		boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled();

		if (!fulltextEnabled) {
			failIfUsed(Constants.PARAM_TEXT);
			failIfUsed(Constants.PARAM_CONTENT);
		} else {
			for (SortSpec sortSpec : myParams.getAllChainsInOrder()) {
				final String paramName = sortSpec.getParamName();
				if (paramName.contains(".")) {
					failIfUsedWithChainedSort(Constants.PARAM_TEXT);
					failIfUsedWithChainedSort(Constants.PARAM_CONTENT);
				}
			}
		}

		// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we
		// can.
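		// For now: only hand the query to Hibernate Search when contained-mode is off and the fulltext
		// service reports it can handle both the parameters and every sort term.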
		return fulltextEnabled
				&& myParams != null
				&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
				&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
				&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
	}

	private void failIfUsed(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(1192)
					+ "Fulltext search is not enabled on this service, can not process parameter: " + theParamName);
		}
	}

	private void failIfUsedWithChainedSort(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(2524)
					+ "Fulltext search combined with chained sorts are not supported, can not process parameter: "
					+ theParamName);
		}
	}

	private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) {
		// Can we use our hibernate search generated index on resource to support lastN?:
		if (myStorageSettings.isAdvancedHSearchIndexing()) {
			if (myFulltextSearchSvc == null) {
				throw new InvalidRequestException(Msg.code(2027)
						+ "LastN operation is not enabled on this service, can not process this request");
			}
			return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
					.map(t -> (JpaPid) t)
					.collect(Collectors.toList());
		} else {
			throw new InvalidRequestException(
					Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
		}
	}

	private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) {
		JpaPid pid = null;
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam =
					myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			if (idParam instanceof TokenParam) {
				TokenParam idParm = (TokenParam) idParam;
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}

			pid = myIdHelperService
					.resolveResourceIdentity(
							myRequestPartitionId,
							myResourceName,
							idParamValue,
							ResolveIdentityMode.includeDeleted().cacheOk())
					.getPersistentId();
		}
		return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
	}

	private void doCreateChunkedQueries(
			SearchParameterMap theParams,
			List<JpaPid> thePids,
			SearchQueryProperties theSearchQueryProperties,
			RequestDetails theRequest,
			ArrayList<ISearchQueryExecutor> theQueries) {

		if (thePids.size() < getMaximumPageSize()) {
			thePids = normalizeIdListForInClause(thePids);
		}
		theSearchQueryProperties.setMaxResultsRequested(thePids.size());
		createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
	}

	/**
	 * Combs through the params for any _id parameters and extracts the PIDs for them
	 */
	private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
		// get all the IQueryParameterType objects
		// for _id -> these should all be StringParam values
		HashSet<IIdType> ids = new HashSet<>();
		List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
		for (List<IQueryParameterType> paramList : params) {
			for (IQueryParameterType param : paramList) {
				String id;
				if (param instanceof StringParam) {
					// we expect all _id values to be StringParams
					id = ((StringParam) param).getValue();
				} else if (param instanceof TokenParam) {
					id = ((TokenParam) param).getValue();
				} else {
					// we do not expect the _id parameter to be a non-string value
					throw new IllegalArgumentException(
							Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
				}

				IIdType idType = myContext.getVersion().newIdType();
				if (id.contains("/")) {
					idType.setValue(id);
				} else {
					idType.setValue(myResourceName + "/" + id);
				}
				ids.add(idType);
			}
		}

		// fetch our target Pids
		// this will throw if an id is not found
		Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
				myRequestPartitionId,
				new ArrayList<>(ids),
				ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

		// add the pids to targetPids
		for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
			theTargetPids.add(pid.getPersistentId());
		}
	}

	private void createChunkedQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		if (myParams.getEverythingMode() != null) {
			createChunkedQueryForEverythingSearch(
					theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
		} else {
			createChunkedQueryNormalSearch(
					theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
		}
	}

	private void createChunkedQueryNormalSearch(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				myResourceName,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchProperties.isDoCountOnlyFlag());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		if (theParams.keySet().size() > 1
				|| theParams.getSort() != null
				|| theParams.keySet().contains(Constants.PARAM_HAS)
				|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);
			}
		}

		/*
		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
		 * specific filters with ORs as their root from working around the natural resource type / deletion
		 * status / partition IDs built into queries.
		 */
		if (theParams.containsKey(Constants.PARAM_FILTER)) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Normal search
		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
		// partition ID predicate in that case.
		if (!sqlBuilder.haveAtLeastOnePredicate()) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		// Last updated
		addLastUpdatePredicate(sqlBuilder);

		/*
		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
		 * to something needed to guarantee correct results.
		 *
		 * Why do we need it? Suppose for example, a query like:
		 *   Observation?category=foo,bar,baz
		 * And suppose you have many resources that have all 3 of these category codes. In this case
		 * the SQL query will probably return the same PIDs multiple times, and if this happens enough
		 * we may exhaust the query results without getting enough distinct results back. When that
		 * happens we re-run the query with a larger limit. Excluding results we already know about
		 * tries to ensure that we get new unique results.
		 *
		 * The challenge with that though is that lots of DBs have an issue with too many
		 * parameters in one query. So we only do this optimization if there aren't too
		 * many results.
		 */
		if (myHasNextIteratorQuery) {
			if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) {
				sqlBuilder.excludeResourceIdsPredicate(myPidSet);
			}
		}

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * OR
		 * if the MaxResultsToFetch is null, we are requesting "everything",
		 * so we'll let the db do the deduplication (instead of in-memory)
		 */
		if (theSearchProperties.isDeduplicateInDatabase()) {
			queryStack3.addGrouping();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Sort
		 *
		 * If we have a sort, we wrap the criteria search (the search that actually
		 * finds the appropriate resources) in an outer search which is then sorted
		 */
		if (theSearchProperties.hasSort()) {
			assert !theSearchProperties.isDoCountOnlyFlag();

			createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void executeSearch(
			SearchQueryProperties theProperties,
			List<ISearchQueryExecutor> theSearchQueryExecutors,
			SearchQueryBuilder sqlBuilder) {
		GeneratedSql generatedSql =
				sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
		if (!generatedSql.isMatchNothing()) {
			SearchQueryExecutor executor =
					mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
			theSearchQueryExecutors.add(executor);
		}
	}

	private void createChunkedQueryForEverythingSearch(
			RequestDetails theRequest,
			SearchParameterMap theParams,
			SearchQueryProperties theSearchQueryProperties,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {

		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				null,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchQueryProperties.isDoCountOnlyFlag());

		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

		Set<JpaPid> targetPids = new HashSet<>();
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {

			extractTargetPidsFromIdParams(targetPids);

			// add the target pids to our executors as the first
			// results iterator to go through
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
		} else {
			// For Everything queries, we make the ResourceLink table the query root, since this query
			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
			// the one problem with this approach is that it doesn't catch Patients that have absolutely
			// nothing linked to them. So we do one additional query to make sure we catch those too.
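			// That extra query is built below: fetchPidsSqlBuilder has no predicates added, so it simply
			// selects the PIDs of every resource of this type, and the matching PIDs are fed in as their
			// own executor ahead of the main everything query.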
			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
					myContext,
					myStorageSettings,
					myPartitionSettings,
					myRequestPartitionId,
					myResourceName,
					mySqlBuilderFactory,
					myDialectProvider,
					theSearchQueryProperties.isDoCountOnlyFlag());
			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
					theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
			String sql = allTargetsSql.getSql();
			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

			List<JpaPid> output =
					jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()));

			// we add a search executor to fetch unlinked patients first
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
		}

		List<String> typeSourceResources = new ArrayList<>();
		if (myParams.get(Constants.PARAM_TYPE) != null) {
			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
		}

		queryStack3.addPredicateEverythingOperation(
				myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY
		 * ORDER BY is required to make sure we return unique results for each page
		 */
		if (theSearchQueryProperties.hasOffset()) {
			queryStack3.addGrouping();
			queryStack3.addOrdering();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
		if (thePidList != null && !thePidList.isEmpty()) {
			theSqlBuilder.addResourceIdsPredicate(thePidList);
		}
	}

	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
		DateRangeParam lu = myParams.getLastUpdated();
		if (lu != null && !lu.isEmpty()) {
			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
			theSqlBuilder.addPredicate(lastUpdatedPredicates);
		}
	}

	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
		jdbcTemplate.setFetchSize(myFetchSize);
		if (theMaximumResults != null) {
			jdbcTemplate.setMaxRows(theMaximumResults);
		}
		return jdbcTemplate;
	}

	private Collection<String> extractTypeSourceResourcesFromParams() {

		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);

		// first off, let's flatten the list of lists
		List<IQueryParameterType> iQueryParameterTypesList =
				listOfList.stream().flatMap(List::stream).collect(Collectors.toList());

		// then, extract all elements of each CSV into one big list
		List<String> resourceTypes = iQueryParameterTypesList.stream()
				.map(param -> ((StringParam) param).getValue())
				.map(csvString -> List.of(csvString.split(",")))
				.flatMap(List::stream)
				.collect(Collectors.toList());

		Set<String> knownResourceTypes = myContext.getResourceTypes();

		// remove leading/trailing whitespace if any and remove duplicates
		Set<String> retVal = new HashSet<>();

		for (String type : resourceTypes) {
			String trimmed = type.trim();
			if (!knownResourceTypes.contains(trimmed)) {
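				// an unknown resource type in _type is a hard error rather than being silently dropped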
				throw new ResourceNotFoundException(
						Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter.");
			}
			retVal.add(trimmed);
		}

		return retVal;
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myStorageSettings.isIndexOnContainedResources()
				&& theParams.values().stream()
						.flatMap(Collection::stream)
						.flatMap(Collection::stream)
						.anyMatch(ReferenceParam.class::isInstance);
	}

	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
		if (theSort == null || isBlank(theSort.getParamName())) {
			return;
		}

		boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC);

		if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourceId(ascending);

		} else if (Constants.PARAM_PID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourcePID(ascending);

		} else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {

			theQueryStack.addSortOnLastUpdated(ascending);

		} else {
			RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

			/*
			 * If we have a sort like _sort=subject.name and we have an
			 * uplifted refchain for that combination we can do it more efficiently
			 * by using the index associated with the uplifted refchain. In this case,
			 * we need to find the actual target search parameter (corresponding
			 * to "name" in this example) so that we know what datatype it is.
			 */
			String paramName = theSort.getParamName();
			if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
				String[] chains = StringUtils.split(paramName, '.');
				if (chains.length == 2) {

					// Given: Encounter?_sort=Patient:subject.name
					String referenceParam = chains[0]; // subject
					String referenceParamTargetType = null; // Patient
					String targetParam = chains[1]; // name

					int colonIdx = referenceParam.indexOf(':');
					if (colonIdx > -1) {
						referenceParamTargetType = referenceParam.substring(0, colonIdx);
						referenceParam = referenceParam.substring(colonIdx + 1);
					}
					RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
							myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
					if (outerParam == null) {
						throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
					} else if (outerParam.hasUpliftRefchain(targetParam)) {
						for (String nextTargetType : outerParam.getTargets()) {
							if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
								continue;
							}
							RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
									nextTargetType,
									targetParam,
									ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
							if (innerParam != null) {
								param = innerParam;
								break;
							}
						}
					}
				}
			}

			int colonIdx = paramName.indexOf(':');
			String referenceTargetType = null;
			if (colonIdx > -1) {
				referenceTargetType = paramName.substring(0, colonIdx);
				paramName = paramName.substring(colonIdx + 1);
			}

			int dotIdx = paramName.indexOf('.');
			String chainName = null;
			if (param == null && dotIdx > -1) {
				chainName = paramName.substring(dotIdx + 1);
				paramName = paramName.substring(0, dotIdx);
				if (chainName.contains(".")) {
					String msg = myContext
							.getLocalizer()
							.getMessageSanitized(
									BaseStorageDao.class,
									"invalidSortParameterTooManyChains",
									paramName + "." + chainName);
					throw new InvalidRequestException(Msg.code(2286) + msg);
				}
			}

			if (param == null) {
				param = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
			}

			if (param == null) {
				throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
			}

			// param will never be null here (the above line throws if it is)
			// this is just to prevent the warning
			assert param != null;
			if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
				throw new InvalidRequestException(
						Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
			}

			switch (param.getParamType()) {
				case STRING:
					theQueryStack.addSortOnString(myResourceName, paramName, ascending);
					break;
				case DATE:
					theQueryStack.addSortOnDate(myResourceName, paramName, ascending);
					break;
				case REFERENCE:
					theQueryStack.addSortOnResourceLink(
							myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
					break;
				case TOKEN:
					theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
					break;
				case NUMBER:
					theQueryStack.addSortOnNumber(myResourceName, paramName, ascending);
					break;
				case URI:
					theQueryStack.addSortOnUri(myResourceName, paramName, ascending);
					break;
				case QUANTITY:
					theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending);
					break;
				case COMPOSITE:
					List<RuntimeSearchParam> compositeList =
							JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param);
					if (compositeList == null) {
						throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName
								+ " is not defined by the resource " + myResourceName);
					}
					if (compositeList.size() != 2) {
						throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName
								+ " must have 2 composite types declared in parameter annotation, found "
								+ compositeList.size());
					}
					RuntimeSearchParam left = compositeList.get(0);
					RuntimeSearchParam right = compositeList.get(1);

					createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending);
					createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending);

					break;
				case SPECIAL:
					if (LOCATION_POSITION.equals(param.getPath())) {
						theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams);
						break;
					}
					throw new InvalidRequestException(
							Msg.code(2306) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);

				case HAS:
				default:
					throw new InvalidRequestException(
							Msg.code(1197) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);
			}
		}

		// Recurse
		createSort(theQueryStack, theSort.getChain(), theParams);
	}

	private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
		Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
				theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
		String msg = myContext
				.getLocalizer()
				.getMessageSanitized(
						BaseStorageDao.class,
						"invalidSortParameter",
						theParamName,
						theResourceName,
						validSearchParameterNames);
		throw new InvalidRequestException(Msg.code(1194) + msg);
	}

	private void createCompositeSort(
			QueryStack theQueryStack,
			RestSearchParameterTypeEnum theParamType,
			String theParamName,
			boolean theAscending) {

		switch (theParamType) {
			case STRING:
				theQueryStack.addSortOnString(myResourceName, theParamName, theAscending);
				break;
			case DATE:
				theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending);
				break;
			case TOKEN:
				theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending);
				break;
			case QUANTITY:
				theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending);
				break;
			case NUMBER:
			case REFERENCE:
			case COMPOSITE:
			case URI:
			case HAS:
			case SPECIAL:
			default:
				throw new InvalidRequestException(
						Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType
								+ " on _sort=" + theParamName);
		}
	}

	private void doLoadPids(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			Map<Long, Integer> thePosition) {

		Map<JpaPid, Long> resourcePidToVersion = null;
		for (JpaPid next : thePids) {
			if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
				if (resourcePidToVersion == null) {
					resourcePidToVersion = new HashMap<>();
				}
				resourcePidToVersion.put(next, next.getVersion());
			}
		}

		List<JpaPid> versionlessPids = new ArrayList<>(thePids);
		if (versionlessPids.size() < getMaximumPageSize()) {
			versionlessPids = normalizeIdListForInClause(versionlessPids);
		}

		// Load the resource bodies
		List<ResourceHistoryTable> resourceSearchViewList =
				myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(
						JpaPidFk.fromPids(versionlessPids));

		/*
		 * If we have specific versions to load, replace the history entries with the
		 * correct ones
		 *
		 * TODO: this could definitely be made more efficient, probably by not loading the wrong
		 * version entity first, and by batching the fetches. But this is a fairly infrequently
		 * used feature, and loading history entities by PK is a very efficient query so it's
		 * not the end of the world
		 */
		if (resourcePidToVersion != null) {
			for (int i = 0; i < resourceSearchViewList.size(); i++) {
				ResourceHistoryTable next = resourceSearchViewList.get(i);
				JpaPid resourceId = next.getPersistentId();
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
				if (version != null && !version.equals(next.getVersion())) {
					ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion(
							next.getResourceId().toFk(), version);
					resourceSearchViewList.set(i, replacement);
				}
			}
		}

		// -- preload all tags with tag definition if any
		Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);

		for (ResourceHistoryTable next : resourceSearchViewList) {
			if (next.getDeleted() != null) {
				continue;
			}

			Class<? extends IBaseResource> resourceType =
					myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();

			JpaPid resourceId = next.getPersistentId();

			if (resourcePidToVersion != null) {
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
			}

			IBaseResource resource = null;
			resource = myJpaStorageResourceParser.toResource(
					resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation);
			if (resource == null) {
				ourLog.warn(
						"Unable to find resource {}/{}/_history/{} in database",
						next.getResourceType(),
						next.getIdDt().getIdPart(),
						next.getVersion());
				continue;
			}

			Integer index = thePosition.get(resourceId.getId());
			if (index == null) {
				ourLog.warn("Got back unexpected resource PID {}", resourceId);
				continue;
			}

			if (theIncludedPids.contains(resourceId)) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH);
			}

			// ensure there's enough space; "<=" because of 0-indexing
			while (theResourceListToPopulate.size() <= index) {
				theResourceListToPopulate.add(null);
			}
			theResourceListToPopulate.set(index, resource);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {
		switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED:
				return getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED:
				return getPidToTagMapUnversioned(theHistoryTables);
			case INLINE:
			default:
				return Map.of();
		}
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		int index = 0;
		for (JpaPid next : thePids) {
			position.put(next.getId(), index++);
		}

		// Can we fast track this loading by checking elastic search?
		boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids);
		if (isUsingElasticSearch) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in the index, so log a warning and fall back to the JPA search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some jdbc drivers can't handle long param lists.
		QueryChunker.chunk(
				thePids,
				t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position));
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isAdvancedHSearchIndexing()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we use the fulltextsvc via hibernate-search to load resources, or stay backwards compatible
		// with the older ES-only implementation to handle lastN?
1466 if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) { 1467 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1468 1469 return myFulltextSearchSvc.getResources(pidList); 1470 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1471 // legacy LastN implementation 1472 return myIElasticsearchSvc.getObservationResources(thePids); 1473 } else { 1474 return Collections.emptyList(); 1475 } 1476 } 1477 1478 /** 1479 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1480 * so it can't be Collections.emptySet() or some such thing. 1481 * The JpaPid returned will have resource type populated. 1482 */ 1483 @Override 1484 public Set<JpaPid> loadIncludes( 1485 FhirContext theContext, 1486 EntityManager theEntityManager, 1487 Collection<JpaPid> theMatches, 1488 Collection<Include> theIncludes, 1489 boolean theReverseMode, 1490 DateRangeParam theLastUpdated, 1491 String theSearchIdOrDescription, 1492 RequestDetails theRequest, 1493 Integer theMaxCount) { 1494 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1495 parameters.setFhirContext(theContext); 1496 parameters.setEntityManager(theEntityManager); 1497 parameters.setMatches(theMatches); 1498 parameters.setIncludeFilters(theIncludes); 1499 parameters.setReverseMode(theReverseMode); 1500 parameters.setLastUpdated(theLastUpdated); 1501 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1502 parameters.setRequestDetails(theRequest); 1503 parameters.setMaxCount(theMaxCount); 1504 return loadIncludes(parameters); 1505 } 1506 1507 @Override 1508 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1509 Collection<JpaPid> matches = theParameters.getMatches(); 1510 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1511 boolean reverseMode = theParameters.isReverseMode(); 1512 EntityManager entityManager = theParameters.getEntityManager(); 1513 Integer maxCount = theParameters.getMaxCount(); 1514 FhirContext fhirContext = theParameters.getFhirContext(); 1515 RequestDetails request = theParameters.getRequestDetails(); 1516 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1517 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1518 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1519 IInterceptorBroadcaster compositeBroadcaster = 1520 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1521 1522 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1523 CurrentThreadCaptureQueriesListener.startCapturing(); 1524 } 1525 if (matches.isEmpty()) { 1526 return new HashSet<>(); 1527 } 1528 if (currentIncludes == null || currentIncludes.isEmpty()) { 1529 return new HashSet<>(); 1530 } 1531 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1532 String searchPartitionIdFieldName = 1533 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1534 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1535 String findPartitionIdFieldName = 1536 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1537 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1538 String findVersionFieldName = null; 1539 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1540 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1541 } 1542 1543 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1544 HashSet<JpaPid> allAdded = new HashSet<>(); 1545 HashSet<JpaPid> original = new HashSet<>(matches); 1546 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1547 1548 int roundCounts = 0; 1549 StopWatch w = new StopWatch(); 1550 1551 boolean addedSomeThisRound; 1552 do { 1553 roundCounts++; 1554 1555 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1556 1557 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1558 Include nextInclude = iter.next(); 1559 if (!nextInclude.isRecurse()) { 1560 iter.remove(); 1561 } 1562 1563 // Account for _include=* 1564 boolean matchAll = "*".equals(nextInclude.getValue()); 1565 1566 // Account for _include=[resourceType]:* 1567 String wantResourceType = null; 1568 if (!matchAll) { 1569 if ("*".equals(nextInclude.getParamName())) { 1570 wantResourceType = nextInclude.getParamType(); 1571 matchAll = true; 1572 } 1573 } 1574 1575 if (matchAll) { 1576 loadIncludesMatchAll( 1577 findPidFieldName, 1578 findPartitionIdFieldName, 1579 findResourceTypeFieldName, 1580 findVersionFieldName, 1581 searchPidFieldName, 1582 searchPartitionIdFieldName, 1583 wantResourceType, 1584 reverseMode, 1585 hasDesiredResourceTypes, 1586 nextRoundMatches, 1587 entityManager, 1588 maxCount, 1589 desiredResourceTypes, 1590 pidsToInclude, 1591 request); 1592 } else { 1593 loadIncludesMatchSpecific( 1594 nextInclude, 1595 fhirContext, 1596 findPidFieldName, 1597 findPartitionIdFieldName, 1598 findVersionFieldName, 1599 searchPidFieldName, 1600 searchPartitionIdFieldName, 1601 reverseMode, 1602 nextRoundMatches, 1603 entityManager, 1604 maxCount, 1605 pidsToInclude, 1606 request); 1607 } 1608 } 1609 1610 nextRoundMatches.clear(); 1611 for (JpaPid next : pidsToInclude) { 1612 if (!original.contains(next) && !allAdded.contains(next)) { 1613 nextRoundMatches.add(next); 1614 } else { 1615 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1616 } 1617 } 1618 1619 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1620 1621 if (maxCount != null && allAdded.size() >= maxCount) { 1622 break; 1623 } 1624 1625 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1626 1627 allAdded.removeAll(original); 1628 1629 ourLog.info( 1630 "Loaded {} {} in {} rounds and {} ms for search {}", 1631 allAdded.size(), 1632 reverseMode ? 
"_revincludes" : "_includes", 1633 roundCounts, 1634 w.getMillisAndRestart(), 1635 searchIdOrDescription); 1636 1637 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1638 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1639 } 1640 1641 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1642 // This can be used to remove results from the search result details before 1643 // the user has a chance to know that they were in the results 1644 if (!allAdded.isEmpty()) { 1645 1646 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1647 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1648 JpaPreResourceAccessDetails accessDetails = 1649 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1650 HookParams params = new HookParams() 1651 .add(IPreResourceAccessDetails.class, accessDetails) 1652 .add(RequestDetails.class, request) 1653 .addIfMatchesType(ServletRequestDetails.class, request); 1654 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1655 1656 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1657 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1658 JpaPid value = includedPidList.remove(i); 1659 if (value != null) { 1660 allAdded.remove(value); 1661 } 1662 } 1663 } 1664 } 1665 } 1666 1667 return allAdded; 1668 } 1669 1670 private void loadIncludesMatchSpecific( 1671 Include nextInclude, 1672 FhirContext fhirContext, 1673 String findPidFieldName, 1674 String findPartitionFieldName, 1675 String findVersionFieldName, 1676 String searchPidFieldName, 1677 String searchPartitionFieldName, 1678 boolean reverseMode, 1679 List<JpaPid> nextRoundMatches, 1680 EntityManager entityManager, 1681 Integer maxCount, 1682 HashSet<JpaPid> pidsToInclude, 1683 RequestDetails theRequest) { 1684 List<String> paths; 1685 1686 // Start replace 1687 RuntimeSearchParam param; 1688 String resType = nextInclude.getParamType(); 1689 if (isBlank(resType)) { 1690 return; 1691 } 1692 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1693 if (def == null) { 1694 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1695 return; 1696 } 1697 1698 String paramName = nextInclude.getParamName(); 1699 if (isNotBlank(paramName)) { 1700 param = mySearchParamRegistry.getActiveSearchParam( 1701 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1702 } else { 1703 param = null; 1704 } 1705 if (param == null) { 1706 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1707 return; 1708 } 1709 1710 paths = param.getPathsSplitForResourceType(resType); 1711 // end replace 1712 1713 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1714 1715 for (String nextPath : paths) { 1716 String findPidFieldSqlColumn = 1717 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1718 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1719 if (findVersionFieldName != null) { 1720 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1721 } 1722 if (myPartitionSettings.isDatabasePartitionMode()) { 1723 fieldsToLoad += ", r."; 1724 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1725 ? 
"partition_id" 1726 : "target_res_partition_id"; 1727 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1728 } 1729 1730 // Query for includes lookup has 2 cases 1731 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1732 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1733 // url in target_resource_url 1734 1735 // Case 1: 1736 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1737 1738 String searchPidFieldSqlColumn = 1739 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1740 StringBuilder localReferenceQuery = new StringBuilder(); 1741 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1742 localReferenceQuery.append(" FROM hfj_res_link r "); 1743 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1744 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1745 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1746 } 1747 localReferenceQuery 1748 .append(" AND r.") 1749 .append(searchPidFieldSqlColumn) 1750 .append(" IN (:target_pids) "); 1751 if (myPartitionSettings.isDatabasePartitionMode()) { 1752 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1753 ? "target_res_partition_id" 1754 : "partition_id"; 1755 localReferenceQuery 1756 .append("AND r.") 1757 .append(partitionFieldToSearch) 1758 .append(" = :search_partition_id "); 1759 } 1760 localReferenceQueryParams.put("src_path", nextPath); 1761 // we loop over target_pids later. 1762 if (targetResourceTypes != null) { 1763 if (targetResourceTypes.size() == 1) { 1764 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1765 localReferenceQueryParams.put( 1766 "target_resource_type", 1767 targetResourceTypes.iterator().next()); 1768 } else { 1769 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1770 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1771 } 1772 } 1773 1774 // Case 2: 1775 Pair<String, Map<String, Object>> canonicalQuery = 1776 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest); 1777 1778 String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1779 1780 Map<String, Object> limitParams = new HashMap<>(); 1781 if (maxCount != null) { 1782 LinkedList<Object> bindVariables = new LinkedList<>(); 1783 sql = SearchQueryBuilder.applyLimitToSql( 1784 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1785 1786 // The dialect SQL limiter uses positional params, but we're using 1787 // named params here, so we need to replace the positional params 1788 // with equivalent named ones 1789 StringBuilder sb = new StringBuilder(); 1790 for (int i = 0; i < sql.length(); i++) { 1791 char nextChar = sql.charAt(i); 1792 if (nextChar == '?') { 1793 String nextName = "limit" + i; 1794 sb.append(':').append(nextName); 1795 limitParams.put(nextName, bindVariables.removeFirst()); 1796 } else { 1797 sb.append(nextChar); 1798 } 1799 } 1800 sql = sb.toString(); 1801 } 1802 1803 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1804 for (Collection<JpaPid> nextPartition : partitions) { 1805 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1806 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1807 if (myPartitionSettings.isDatabasePartitionMode()) { 1808 
q.setParameter( 1809 "search_partition_id", 1810 nextPartition.iterator().next().getPartitionId()); 1811 } 1812 localReferenceQueryParams.forEach(q::setParameter); 1813 canonicalQuery.getRight().forEach(q::setParameter); 1814 limitParams.forEach(q::setParameter); 1815 1816 @SuppressWarnings("unchecked") 1817 List<Tuple> results = q.getResultList(); 1818 for (Tuple result : results) { 1819 if (result != null) { 1820 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1821 Long resourceVersion = null; 1822 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1823 resourceVersion = 1824 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1825 } 1826 Integer partitionId = null; 1827 if (myPartitionSettings.isDatabasePartitionMode()) { 1828 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1829 } 1830 1831 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1832 pid.setPartitionId(partitionId); 1833 pidsToInclude.add(pid); 1834 } 1835 } 1836 } 1837 } 1838 } 1839 1840 private void loadIncludesMatchAll( 1841 String findPidFieldName, 1842 String findPartitionFieldName, 1843 String findResourceTypeFieldName, 1844 String findVersionFieldName, 1845 String searchPidFieldName, 1846 String searchPartitionFieldName, 1847 String wantResourceType, 1848 boolean reverseMode, 1849 boolean hasDesiredResourceTypes, 1850 List<JpaPid> nextRoundMatches, 1851 EntityManager entityManager, 1852 Integer maxCount, 1853 List<String> desiredResourceTypes, 1854 HashSet<JpaPid> pidsToInclude, 1855 RequestDetails request) { 1856 StringBuilder sqlBuilder = new StringBuilder(); 1857 sqlBuilder.append("SELECT r.").append(findPidFieldName); 1858 sqlBuilder.append(", r.").append(findResourceTypeFieldName); 1859 sqlBuilder.append(", r.myTargetResourceUrl"); 1860 if (findVersionFieldName != null) { 1861 sqlBuilder.append(", r.").append(findVersionFieldName); 1862 } 1863 if (myPartitionSettings.isDatabasePartitionMode()) { 1864 sqlBuilder.append(", r.").append(findPartitionFieldName); 1865 } 1866 sqlBuilder.append(" FROM ResourceLink r WHERE "); 1867 1868 if (myPartitionSettings.isDatabasePartitionMode()) { 1869 sqlBuilder.append("r.").append(searchPartitionFieldName); 1870 sqlBuilder.append(" = :target_partition_id AND "); 1871 } 1872 1873 sqlBuilder.append("r.").append(searchPidFieldName); 1874 sqlBuilder.append(" IN (:target_pids)"); 1875 1876 /* 1877 * We need to set the resource type in 2 cases only: 1878 * 1) we are in $everything mode 1879 * (where we only want to fetch specific resource types, regardless of what is 1880 * available to fetch) 1881 * 2) we are doing revincludes 1882 * 1883 * Technically if the request is a qualified star (e.g. _include=Observation:*) we 1884 * should always be checking the source resource type on the resource link. We don't 1885 * actually index that column though by default, so in order to try and be efficient 1886 * we don't actually include it for includes (but we do for revincludes). This is 1887 * because for an include, it doesn't really make sense to include a different 1888 * resource type than the one you are searching on. 1889 */ 1890 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1891 // because mySourceResourceType is not part of the HFJ_RES_LINK 1892 // index, this might not be the most optimal performance. 
1893 // but it is for an $everything operation (and maybe we should update the index) 1894 sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type"); 1895 } else { 1896 wantResourceType = null; 1897 } 1898 1899 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1900 // resources 1901 // (e.g. via Provenance, List, or Group) when in an $everything operation 1902 if (myParams != null 1903 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1904 sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'"); 1905 sqlBuilder.append(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE.stream() 1906 .collect(Collectors.joining("', '", " AND r.mySourceResourceType NOT IN ('", "')"))); 1907 } 1908 if (hasDesiredResourceTypes) { 1909 sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)"); 1910 } 1911 1912 String sql = sqlBuilder.toString(); 1913 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1914 for (Collection<JpaPid> nextPartition : partitions) { 1915 TypedQuery<?> q = entityManager.createQuery(sql, Object[].class); 1916 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1917 if (myPartitionSettings.isDatabasePartitionMode()) { 1918 q.setParameter( 1919 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1920 } 1921 if (wantResourceType != null) { 1922 q.setParameter("want_resource_type", wantResourceType); 1923 } 1924 if (maxCount != null) { 1925 q.setMaxResults(maxCount); 1926 } 1927 if (hasDesiredResourceTypes) { 1928 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1929 } 1930 List<?> results = q.getResultList(); 1931 Set<String> canonicalUrls = null; 1932 for (Object nextRow : results) { 1933 if (nextRow == null) { 1934 // This can happen if there are outgoing references which are canonical or point to 1935 // other servers 1936 continue; 1937 } 1938 1939 Long version = null; 1940 Long resourceId = (Long) ((Object[]) nextRow)[0]; 1941 String resourceType = (String) ((Object[]) nextRow)[1]; 1942 String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; 1943 Integer partitionId = null; 1944 int offset = 0; 1945 if (findVersionFieldName != null) { 1946 version = (Long) ((Object[]) nextRow)[3]; 1947 offset++; 1948 } 1949 if (myPartitionSettings.isDatabasePartitionMode()) { 1950 partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]); 1951 } 1952 1953 if (resourceId != null) { 1954 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1955 pid.setPartitionId(partitionId); 1956 pidsToInclude.add(pid); 1957 } else if (resourceCanonicalUrl != null) { 1958 if (canonicalUrls == null) { 1959 canonicalUrls = new HashSet<>(); 1960 } 1961 canonicalUrls.add(resourceCanonicalUrl); 1962 } 1963 } 1964 1965 if (canonicalUrls != null) { 1966 String message = 1967 "Search with _include=* can be inefficient when references using canonical URLs are detected. 
Use more specific _include values instead.";
				firePerformanceWarning(request, message);
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance trace hook: sends the raw SQL queries captured on the current
	 * thread to the {@code JPA_PERFTRACE_RAW_SQL} pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
2049 targetResourceTypes = null; 2050 } 2051 return targetResourceTypes; 2052 } 2053 2054 @Nonnull 2055 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2056 String theVersionFieldName, 2057 Set<String> theTargetResourceTypes, 2058 boolean theReverse, 2059 RequestDetails theRequest) { 2060 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2061 if (theVersionFieldName != null) { 2062 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2063 fieldsToLoadFromSpidxUriTable += ", NULL"; 2064 } 2065 2066 if (myPartitionSettings.isDatabasePartitionMode()) { 2067 if (theReverse) { 2068 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2069 } else { 2070 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2071 } 2072 } 2073 2074 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2075 // But sp_name isn't indexed, so we use hash_identity instead. 2076 CanonicalUrlTargets canonicalUrlTargets = 2077 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2078 2079 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2080 StringBuilder canonicalUrlQuery = new StringBuilder(); 2081 canonicalUrlQuery 2082 .append("SELECT ") 2083 .append(fieldsToLoadFromSpidxUriTable) 2084 .append(' '); 2085 canonicalUrlQuery.append("FROM hfj_res_link r "); 2086 2087 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2088 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2089 if (myPartitionSettings.isDatabasePartitionMode()) { 2090 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2091 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds); 2092 } 2093 if (canonicalUrlTargets.myHashIdentityValues.size() == 1) { 2094 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2095 canonicalUriQueryParams.put( 2096 "uri_identity_hash", 2097 canonicalUrlTargets.myHashIdentityValues.iterator().next()); 2098 } else { 2099 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2100 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues); 2101 } 2102 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2103 canonicalUrlQuery.append(")"); 2104 2105 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2106 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2107 canonicalUrlQuery.append(" AND"); 2108 if (myPartitionSettings.isDatabasePartitionMode()) { 2109 if (theReverse) { 2110 canonicalUrlQuery.append(" rUri.partition_id"); 2111 } else { 2112 canonicalUrlQuery.append(" r.partition_id"); 2113 } 2114 canonicalUrlQuery.append(" = :search_partition_id"); 2115 canonicalUrlQuery.append(" AND"); 2116 } 2117 if (theReverse) { 2118 canonicalUrlQuery.append(" rUri.res_id"); 2119 } else { 2120 canonicalUrlQuery.append(" r.src_resource_id"); 2121 } 2122 canonicalUrlQuery.append(" IN (:target_pids)"); 2123 2124 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2125 } 2126 2127 @Nonnull 2128 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2129 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2130 Set<String> targetResourceTypes = theTargetResourceTypes; 2131 if (targetResourceTypes == null) { 2132 /* 2133 * If we don't have a 
list of valid target types, we need to figure out a list of all 2134 * possible target types in order to perform the search of the URI index table. This is 2135 * because the hash_identity column encodes the resource type, so we'll need a hash 2136 * value for each possible target type. 2137 */ 2138 targetResourceTypes = new HashSet<>(); 2139 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2140 if (theReverse) { 2141 // For reverse includes, it is really hard to figure out what types 2142 // are actually potentially pointing to the type we're searching for 2143 // in this context, so let's just assume it could be anything. 2144 targetResourceTypes = possibleTypes; 2145 } else { 2146 for (var next : mySearchParamRegistry 2147 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2148 .values() 2149 .stream() 2150 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2151 .collect(Collectors.toList())) { 2152 2153 // If the reference points to a Reference (ie not a canonical or CanonicalReference) 2154 // then it doesn't matter here anyhow. The logic here only works for elements at the 2155 // root level of the document (e.g. QuestionnaireResponse.subject or 2156 // QuestionnaireResponse.subject.where(...)) but this is just an optimization 2157 // anyhow. 2158 if (next.getPath().startsWith(myResourceName + ".")) { 2159 String elementName = 2160 next.getPath().substring(next.getPath().indexOf('.') + 1); 2161 int secondDotIndex = elementName.indexOf('.'); 2162 if (secondDotIndex != -1) { 2163 elementName = elementName.substring(0, secondDotIndex); 2164 } 2165 BaseRuntimeChildDefinition child = 2166 myContext.getResourceDefinition(myResourceName).getChildByName(elementName); 2167 if (child != null) { 2168 BaseRuntimeElementDefinition<?> childDef = child.getChildByName(elementName); 2169 if (childDef != null) { 2170 if (childDef.getName().equals("Reference")) { 2171 continue; 2172 } 2173 } 2174 } 2175 } 2176 2177 if (!next.getTargets().isEmpty()) { 2178 // For each reference parameter on the resource type we're searching for, 2179 // add all the potential target types to the list of possible target 2180 // resource types we can look up. 
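							// Illustrative example only: a reference parameter declared with targets such as
							// Patient, Group and Device contributes those types here, provided their DAOs are
							// registered; a reference parameter with no declared targets falls through to the
							// "assume every registered type" branch below.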
2181 for (var nextTarget : next.getTargets()) { 2182 if (possibleTypes.contains(nextTarget)) { 2183 targetResourceTypes.add(nextTarget); 2184 } 2185 } 2186 } else { 2187 // If we have any references that don't define any target types, then 2188 // we need to assume that all enabled resource types are possible target 2189 // types 2190 targetResourceTypes.addAll(possibleTypes); 2191 break; 2192 } 2193 } 2194 } 2195 } 2196 assert !targetResourceTypes.isEmpty(); 2197 2198 Set<Long> hashIdentityValues = new HashSet<>(); 2199 Set<Integer> partitionIds = new HashSet<>(); 2200 for (String type : targetResourceTypes) { 2201 2202 RequestPartitionId readPartition; 2203 if (myPartitionSettings.isPartitioningEnabled()) { 2204 readPartition = 2205 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2206 } else { 2207 readPartition = RequestPartitionId.defaultPartition(); 2208 } 2209 if (readPartition.hasPartitionIds()) { 2210 partitionIds.addAll(readPartition.getPartitionIds()); 2211 } 2212 2213 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2214 myPartitionSettings, readPartition, type, "url"); 2215 hashIdentityValues.add(hashIdentity); 2216 } 2217 2218 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2219 } 2220 2221 static class CanonicalUrlTargets { 2222 2223 @Nonnull 2224 final Set<Long> myHashIdentityValues; 2225 2226 @Nonnull 2227 final Set<Integer> myPartitionIds; 2228 2229 public CanonicalUrlTargets(@Nonnull Set<Long> theHashIdentityValues, @Nonnull Set<Integer> thePartitionIds) { 2230 myHashIdentityValues = theHashIdentityValues; 2231 myPartitionIds = thePartitionIds; 2232 } 2233 } 2234 2235 /** 2236 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2237 * those pids where: 2238 * <ul> 2239 * <li>No single list is most than {@literal theMaxLoad} entries</li> 2240 * <li>Each list only contains JpaPids with the same partition ID</li> 2241 * </ul> 2242 */ 2243 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2244 2245 if (theNextRoundMatches.size() <= theMaxLoad) { 2246 boolean allSamePartition = true; 2247 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2248 if (!Objects.equals( 2249 theNextRoundMatches.get(i - 1).getPartitionId(), 2250 theNextRoundMatches.get(i).getPartitionId())) { 2251 allSamePartition = false; 2252 break; 2253 } 2254 } 2255 if (allSamePartition) { 2256 return Collections.singletonList(theNextRoundMatches); 2257 } 2258 } 2259 2260 // Break into partitioned sublists 2261 ListMultimap<String, JpaPid> lists = 2262 MultimapBuilder.hashKeys().arrayListValues().build(); 2263 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2264 String partitionId = nextRoundMatch.getPartitionId() != null 2265 ? 
nextRoundMatch.getPartitionId().toString() 2266 : ""; 2267 lists.put(partitionId, nextRoundMatch); 2268 } 2269 2270 List<Collection<JpaPid>> retVal = new ArrayList<>(); 2271 for (String key : lists.keySet()) { 2272 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad); 2273 retVal.addAll(nextPartition); 2274 } 2275 2276 // In unit test mode, we sort the results just for unit test predictability 2277 if (HapiSystemProperties.isUnitTestModeEnabled()) { 2278 retVal = retVal.stream() 2279 .map(t -> t.stream().sorted().collect(Collectors.toList())) 2280 .collect(Collectors.toList()); 2281 } 2282 2283 return retVal; 2284 } 2285 2286 private void attemptComboUniqueSpProcessing( 2287 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 2288 RuntimeSearchParam comboParam = null; 2289 List<String> comboParamNames = null; 2290 List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams( 2291 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2292 if (!exactMatchParams.isEmpty()) { 2293 comboParam = exactMatchParams.get(0); 2294 comboParamNames = new ArrayList<>(theParams.keySet()); 2295 } 2296 2297 if (comboParam == null) { 2298 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 2299 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2300 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2301 List<String> nextCandidateParamNames = 2302 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream() 2303 .map(RuntimeSearchParam::getName) 2304 .collect(Collectors.toList()); 2305 if (theParams.keySet().containsAll(nextCandidateParamNames)) { 2306 comboParam = nextCandidate; 2307 comboParamNames = nextCandidateParamNames; 2308 break; 2309 } 2310 } 2311 } 2312 2313 if (comboParam != null) { 2314 Collections.sort(comboParamNames); 2315 2316 // Since we're going to remove elements below 2317 theParams.values().forEach(this::ensureSubListsAreWritable); 2318 2319 /* 2320 * Apply search against the combo param index in a loop: 2321 * 2322 * 1. First we check whether the actual parameter values in the 2323 * parameter map are actually usable for searching against the combo 2324 * param index. E.g. no search modifiers, date comparators, etc., 2325 * since these mean you can't use the combo index. 2326 * 2327 * 2. Apply and create the join SQl. We remove parameter values from 2328 * the map as we apply them, so any parameter values remaining in the 2329 * map after each loop haven't yet been factored into the SQL. 2330 * 2331 * The loop allows us to create multiple combo index joins if there 2332 * are multiple AND expressions for the related parameters. 
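			 *
			 * Illustrative example only (assuming a combo index on family + birthdate): the query
			 * Patient?family=Smith,Jones&birthdate=1980-01-01 is consumed in a single pass, with the
			 * OR values expanding via the cartesian product into one candidate index string per
			 * family value; a second pass (and a second combo join) only happens when every combo
			 * parameter still has an unconsumed AND-level value after the pass.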
2333 */ 2334 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2335 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2336 } 2337 } 2338 } 2339 2340 private void applyComboSearchParam( 2341 QueryStack theQueryStack, 2342 @Nonnull SearchParameterMap theParams, 2343 RequestDetails theRequest, 2344 List<String> theComboParamNames, 2345 RuntimeSearchParam theComboParam) { 2346 2347 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2348 for (String nextParamName : theComboParamNames) { 2349 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2350 inputs.add(nextValues); 2351 } 2352 2353 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2354 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2355 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2356 2357 StringBuilder searchStringBuilder = new StringBuilder(); 2358 searchStringBuilder.append(myResourceName); 2359 searchStringBuilder.append("?"); 2360 2361 boolean first = true; 2362 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2363 2364 String nextParamName = theComboParamNames.get(paramIndex); 2365 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2366 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2367 // As a result, we strip the prefix if present. 2368 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2369 2370 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2371 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2372 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2373 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2374 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2375 } 2376 } 2377 2378 if (first) { 2379 first = false; 2380 } else { 2381 searchStringBuilder.append('&'); 2382 } 2383 2384 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2385 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2386 2387 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2388 } 2389 2390 String indexString = searchStringBuilder.toString(); 2391 ourLog.debug( 2392 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2393 2394 indexStrings.add(indexString); 2395 } 2396 2397 // Just to make sure we're stable for tests 2398 indexStrings.sort(Comparator.naturalOrder()); 2399 2400 // Interceptor broadcast: JPA_PERFTRACE_INFO 2401 IInterceptorBroadcaster compositeBroadcaster = 2402 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2403 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2404 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2405 StorageProcessingMessage msg = new StorageProcessingMessage() 2406 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2407 + indexStringForLog); 2408 HookParams params = new HookParams() 2409 .add(RequestDetails.class, theRequest) 2410 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2411 .add(StorageProcessingMessage.class, msg); 2412 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2413 } 2414 2415 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2416 case UNIQUE: 2417 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2418 break; 2419 case NON_UNIQUE: 2420 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2421 break; 2422 } 2423 2424 // Remove any empty parameters remaining after this 2425 theParams.clean(); 2426 } 2427 2428 /** 2429 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2430 * searching against a combo param with the given parameter names. This might be {@literal false} if 2431 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2432 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2433 */ 2434 private boolean validateParamValuesAreValidForComboParam( 2435 RequestDetails theRequest, 2436 @Nonnull SearchParameterMap theParams, 2437 List<String> theComboParamNames, 2438 RuntimeSearchParam theComboParam) { 2439 boolean paramValuesAreValidForCombo = true; 2440 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2441 2442 for (String nextParamName : theComboParamNames) { 2443 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2444 2445 if (nextValues == null || nextValues.isEmpty()) { 2446 paramValuesAreValidForCombo = false; 2447 break; 2448 } 2449 2450 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2451 paramOrValues.add(nextAndValue); 2452 2453 for (IQueryParameterType nextOrValue : nextAndValue) { 2454 if (nextOrValue instanceof DateParam) { 2455 DateParam dateParam = (DateParam) nextOrValue; 2456 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2457 String message = "Search with params " + theComboParamNames 2458 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2459 + nextParamName + "'"; 2460 firePerformanceInfo(theRequest, message); 2461 paramValuesAreValidForCombo = false; 2462 break; 2463 } 2464 } 2465 if (nextOrValue instanceof BaseParamWithPrefix) { 2466 BaseParamWithPrefix<?> paramWithPrefix = (BaseParamWithPrefix<?>) nextOrValue; 2467 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2468 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2469 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
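					// Any other prefix (gt, ge, lt, le, ...) changes the match semantics away from exact
					// equality, so the combo index (which stores exact value combinations) cannot be used
					// and the query falls back to the regular predicate building path.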
2470 if (prefix != null && prefix != EQUAL) { 2471 String message = "Search with params " + theComboParamNames 2472 + " is not a candidate for combo searching - Parameter '" + nextParamName 2473 + "' has prefix: '" 2474 + paramWithPrefix.getPrefix().getValue() + "'"; 2475 firePerformanceInfo(theRequest, message); 2476 paramValuesAreValidForCombo = false; 2477 break; 2478 } 2479 } 2480 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2481 String message = "Search with params " + theComboParamNames 2482 + " is not a candidate for combo searching - Parameter '" + nextParamName 2483 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2484 firePerformanceInfo(theRequest, message); 2485 paramValuesAreValidForCombo = false; 2486 break; 2487 } 2488 } 2489 2490 // Reference params are only eligible for using a composite index if they 2491 // are qualified 2492 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2493 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2494 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2495 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2496 if (isBlank(param.getResourceType())) { 2497 ourLog.debug( 2498 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2499 paramValuesAreValidForCombo = false; 2500 break; 2501 } 2502 } 2503 2504 // Date params are not eligible for using composite unique index 2505 // as index could contain date with different precision (e.g. DAY, SECOND) 2506 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2507 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2508 ourLog.debug( 2509 "Search with params {} is not a candidate for combo searching - " 2510 + "Unique combo search parameter '{}' has DATE type", 2511 theComboParamNames, 2512 nextParamName); 2513 paramValuesAreValidForCombo = false; 2514 break; 2515 } 2516 } 2517 2518 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2519 ourLog.debug( 2520 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2521 paramValuesAreValidForCombo = false; 2522 } 2523 2524 return paramValuesAreValidForCombo; 2525 } 2526 2527 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2528 for (int i = 0; i < theListOfLists.size(); i++) { 2529 List<T> oldSubList = theListOfLists.get(i); 2530 if (!(oldSubList instanceof ArrayList)) { 2531 List<T> newSubList = new ArrayList<>(oldSubList); 2532 theListOfLists.set(i, newSubList); 2533 } 2534 } 2535 } 2536 2537 @Override 2538 public void setFetchSize(int theFetchSize) { 2539 myFetchSize = theFetchSize; 2540 } 2541 2542 public SearchParameterMap getParams() { 2543 return myParams; 2544 } 2545 2546 public CriteriaBuilder getBuilder() { 2547 return myCriteriaBuilder; 2548 } 2549 2550 public Class<? 
extends IBaseResource> getResourceType() { 2551 return myResourceType; 2552 } 2553 2554 public String getResourceName() { 2555 return myResourceName; 2556 } 2557 2558 /** 2559 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2560 */ 2561 public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2562 2563 private final RequestDetails myRequest; 2564 private final Set<JpaPid> myCurrentPids; 2565 private Iterator<JpaPid> myCurrentIterator; 2566 private JpaPid myNext; 2567 2568 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2569 myCurrentPids = new HashSet<>(thePidSet); 2570 myCurrentIterator = null; 2571 myRequest = theRequest; 2572 } 2573 2574 private void fetchNext() { 2575 while (myNext == null) { 2576 2577 if (myCurrentIterator == null) { 2578 Set<Include> includes = new HashSet<>(); 2579 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2580 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2581 for (IQueryParameterType type : typeList) { 2582 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2583 for (String resourceType : queryString.split(",")) { 2584 String rt = resourceType.trim(); 2585 if (isNotBlank(rt)) { 2586 includes.add(new Include(rt + ":*", true)); 2587 } 2588 } 2589 } 2590 } 2591 } 2592 if (includes.isEmpty()) { 2593 includes.add(new Include("*", true)); 2594 } 2595 Set<JpaPid> newPids = loadIncludes( 2596 myContext, 2597 myEntityManager, 2598 myCurrentPids, 2599 includes, 2600 false, 2601 getParams().getLastUpdated(), 2602 mySearchUuid, 2603 myRequest, 2604 null); 2605 myCurrentIterator = newPids.iterator(); 2606 } 2607 2608 if (myCurrentIterator.hasNext()) { 2609 myNext = myCurrentIterator.next(); 2610 } else { 2611 myNext = NO_MORE; 2612 } 2613 } 2614 } 2615 2616 @Override 2617 public boolean hasNext() { 2618 fetchNext(); 2619 return !NO_MORE.equals(myNext); 2620 } 2621 2622 @Override 2623 public JpaPid next() { 2624 fetchNext(); 2625 JpaPid retVal = myNext; 2626 myNext = null; 2627 return retVal; 2628 } 2629 } 2630 2631 /** 2632 * Basic Query iterator, used to fetch the results of a query. 2633 */ 2634 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2635 2636 private final SearchRuntimeDetails mySearchRuntimeDetails; 2637 private final RequestDetails myRequest; 2638 private final boolean myHaveRawSqlHooks; 2639 private final boolean myHavePerfTraceFoundIdHook; 2640 private final SortSpec mySort; 2641 private final Integer myOffset; 2642 private final IInterceptorBroadcaster myCompositeBroadcaster; 2643 private boolean myFirst = true; 2644 private IncludesIterator myIncludesIterator; 2645 /** 2646 * The next JpaPid value of the next result in this query. 2647 * Will not be null if fetched using getNext() 2648 */ 2649 private JpaPid myNext; 2650 /** 2651 * The current query result iterator running sql and supplying PIDs 2652 * @see #myQueryList 2653 */ 2654 private ISearchQueryExecutor myResultsIterator; 2655 2656 private boolean myFetchIncludesForEverythingOperation; 2657 /** 2658 * The count of resources skipped because they were seen in earlier results 2659 */ 2660 private int mySkipCount = 0; 2661 /** 2662 * The count of resources that are new in this search 2663 * (ie, not cached in previous searches) 2664 */ 2665 private int myNonSkipCount = 0; 2666 2667 /** 2668 * The list of queries to use to find all results. 
2669 * Normal JPA queries will normally have a single entry. 2670 * Queries that involve Hibernate Search/Elastisearch may have 2671 * multiple queries because of chunking. 2672 * The $everything operation also jams some extra results in. 2673 */ 2674 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2675 2676 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2677 mySearchRuntimeDetails = theSearchRuntimeDetails; 2678 mySort = myParams.getSort(); 2679 myOffset = myParams.getOffset(); 2680 myRequest = theRequest; 2681 myCompositeBroadcaster = 2682 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2683 2684 // everything requires fetching recursively all related resources 2685 if (myParams.getEverythingMode() != null) { 2686 myFetchIncludesForEverythingOperation = true; 2687 } 2688 2689 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2690 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2691 } 2692 2693 private void fetchNext() { 2694 try { 2695 if (myHaveRawSqlHooks) { 2696 CurrentThreadCaptureQueriesListener.startCapturing(); 2697 } 2698 2699 // If we don't have a query yet, create one 2700 if (myResultsIterator == null) { 2701 if (!mySearchProperties.hasMaxResultsRequested()) { 2702 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2703 } 2704 2705 /* 2706 * assigns the results iterator 2707 * and populates the myQueryList. 2708 */ 2709 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2710 } 2711 2712 if (myNext == null) { 2713 // no next means we need a new query (if one is available) 2714 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2715 /* 2716 * Because we combine our DB searches with Lucene 2717 * sometimes we can have multiple results iterators 2718 * (with only some having data in them to extract). 2719 * 2720 * We'll iterate our results iterators until we 2721 * either run out of results iterators, or we 2722 * have one that actually has data in it. 2723 */ 2724 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2725 retrieveNextIteratorQuery(); 2726 } 2727 2728 if (!myResultsIterator.hasNext()) { 2729 // we couldn't find a results iterator; 2730 // we're done here 2731 break; 2732 } 2733 2734 JpaPid nextPid = myResultsIterator.next(); 2735 if (myHavePerfTraceFoundIdHook) { 2736 callPerformanceTracingHook(nextPid); 2737 } 2738 2739 if (nextPid != null) { 2740 if (!myPidSet.contains(nextPid)) { 2741 if (!mySearchProperties.isDeduplicateInDatabase()) { 2742 /* 2743 * We only add to the map if we aren't fetching "everything"; 2744 * otherwise, we let the de-duplication happen in the database 2745 * (see createChunkedQueryNormalSearch above), because it 2746 * saves memory that way. 
2747 */ 2748 myPidSet.add(nextPid); 2749 } 2750 if (doNotSkipNextPidForEverything()) { 2751 myNext = nextPid; 2752 myNonSkipCount++; 2753 break; 2754 } 2755 } else { 2756 mySkipCount++; 2757 } 2758 } 2759 2760 if (!myResultsIterator.hasNext()) { 2761 if (mySearchProperties.hasMaxResultsRequested() 2762 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2763 if (mySkipCount > 0 && myNonSkipCount == 0) { 2764 sendProcessingMsgAndFirePerformanceHook(); 2765 // need the next iterator; increase the maxsize 2766 // (we should always do this) 2767 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2768 mySearchProperties.setMaxResultsRequested(maxResults); 2769 2770 if (!mySearchProperties.isDeduplicateInDatabase()) { 2771 // if we're not using the database to deduplicate 2772 // we should recheck our memory usage 2773 // the prefetch size check is future proofing 2774 int prefetchSize = myStorageSettings 2775 .getSearchPreFetchThresholds() 2776 .size(); 2777 if (prefetchSize > 0) { 2778 if (myStorageSettings 2779 .getSearchPreFetchThresholds() 2780 .get(prefetchSize - 1) 2781 < mySearchProperties.getMaxResultsRequested()) { 2782 mySearchProperties.setDeduplicateInDatabase(true); 2783 } 2784 } 2785 } 2786 2787 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2788 } 2789 } 2790 } 2791 } 2792 } 2793 2794 if (myNext == null) { 2795 // if we got here, it means the current JpaPid has already been processed, 2796 // and we will decide (here) if we need to fetch related resources recursively 2797 if (myFetchIncludesForEverythingOperation) { 2798 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2799 myFetchIncludesForEverythingOperation = false; 2800 } 2801 if (myIncludesIterator != null) { 2802 while (myIncludesIterator.hasNext()) { 2803 JpaPid next = myIncludesIterator.next(); 2804 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2805 myNext = next; 2806 break; 2807 } 2808 } 2809 if (myNext == null) { 2810 myNext = NO_MORE; 2811 } 2812 } else { 2813 myNext = NO_MORE; 2814 } 2815 } 2816 2817 if (!mySearchProperties.hasMaxResultsRequested()) { 2818 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2819 } else { 2820 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2821 } 2822 2823 } finally { 2824 // search finished - fire hooks 2825 if (myHaveRawSqlHooks) { 2826 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2827 } 2828 } 2829 2830 if (myFirst) { 2831 HookParams params = new HookParams() 2832 .add(RequestDetails.class, myRequest) 2833 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2834 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2835 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2836 myFirst = false; 2837 } 2838 2839 if (NO_MORE.equals(myNext)) { 2840 HookParams params = new HookParams() 2841 .add(RequestDetails.class, myRequest) 2842 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2843 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2844 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2845 } 2846 } 2847 2848 private Integer calculateMaxResultsToFetch() { 2849 if (myParams.getLoadSynchronousUpTo() != null) { 2850 return myParams.getLoadSynchronousUpTo(); 2851 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2852 return myParams.getEverythingMode() != null 2853 ? 
myParams.getOffset() + myParams.getCount() 2854 : myParams.getCount(); 2855 } else { 2856 return myStorageSettings.getFetchSizeDefaultMaximum(); 2857 } 2858 } 2859 2860 private boolean doNotSkipNextPidForEverything() { 2861 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2862 } 2863 2864 private void callPerformanceTracingHook(JpaPid theNextPid) { 2865 HookParams params = new HookParams() 2866 .add(Integer.class, System.identityHashCode(this)) 2867 .add(Object.class, theNextPid); 2868 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2869 } 2870 2871 private void sendProcessingMsgAndFirePerformanceHook() { 2872 String msg = "Pass completed with no matching results seeking rows " 2873 + myPidSet.size() + "-" + mySkipCount 2874 + ". This indicates an inefficient query! Retrying with new max count of " 2875 + mySearchProperties.getMaxResultsRequested(); 2876 firePerformanceWarning(myRequest, msg); 2877 } 2878 2879 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2880 Integer offset = theOffset; 2881 if (myQueryList.isEmpty()) { 2882 // Capture times for Lucene/Elasticsearch queries as well 2883 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2884 2885 // setting offset to 0 to fetch all resource ids to guarantee 2886 // correct output result for everything operation during paging 2887 if (myParams.getEverythingMode() != null) { 2888 offset = 0; 2889 } 2890 2891 SearchQueryProperties properties = mySearchProperties.clone(); 2892 properties 2893 .setOffset(offset) 2894 .setMaxResultsRequested(theMaxResultsToFetch) 2895 .setDoCountOnlyFlag(false) 2896 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2897 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2898 } 2899 2900 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2901 2902 retrieveNextIteratorQuery(); 2903 2904 mySkipCount = 0; 2905 myNonSkipCount = 0; 2906 } 2907 2908 private void retrieveNextIteratorQuery() { 2909 close(); 2910 if (isNotEmpty(myQueryList)) { 2911 myResultsIterator = myQueryList.remove(0); 2912 myHasNextIteratorQuery = true; 2913 } else { 2914 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2915 myHasNextIteratorQuery = false; 2916 } 2917 } 2918 2919 @Override 2920 public boolean hasNext() { 2921 if (myNext == null) { 2922 fetchNext(); 2923 } 2924 return !NO_MORE.equals(myNext); 2925 } 2926 2927 @Override 2928 public JpaPid next() { 2929 fetchNext(); 2930 JpaPid retVal = myNext; 2931 myNext = null; 2932 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2933 return retVal; 2934 } 2935 2936 @Override 2937 public int getSkippedCount() { 2938 return mySkipCount; 2939 } 2940 2941 @Override 2942 public int getNonSkippedCount() { 2943 return myNonSkipCount; 2944 } 2945 2946 @Override 2947 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2948 Collection<JpaPid> batch = new ArrayList<>(); 2949 while (this.hasNext() && batch.size() < theBatchSize) { 2950 batch.add(this.next()); 2951 } 2952 return batch; 2953 } 2954 2955 @Override 2956 public void close() { 2957 if (myResultsIterator != null) { 2958 myResultsIterator.close(); 2959 } 2960 myResultsIterator = null; 2961 } 2962 } 2963 2964 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2965 // Only log at debug level since these messages aren't considered important enough 2966 // that we should be 
cluttering the system log, but they are important to the
		// specific query being executed, so we'll log them at INFO level there
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}
}