
001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2025 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.BaseRuntimeChildDefinition; 023import ca.uhn.fhir.context.BaseRuntimeElementDefinition; 024import ca.uhn.fhir.context.ComboSearchParamType; 025import ca.uhn.fhir.context.FhirContext; 026import ca.uhn.fhir.context.FhirVersionEnum; 027import ca.uhn.fhir.context.RuntimeResourceDefinition; 028import ca.uhn.fhir.context.RuntimeSearchParam; 029import ca.uhn.fhir.i18n.Msg; 030import ca.uhn.fhir.interceptor.api.HookParams; 031import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 032import ca.uhn.fhir.interceptor.api.Pointcut; 033import ca.uhn.fhir.interceptor.model.RequestPartitionId; 034import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 035import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 036import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 037import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 038import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 039import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 040import ca.uhn.fhir.jpa.dao.BaseStorageDao; 041import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 042import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 043import ca.uhn.fhir.jpa.dao.IResultIterator; 044import ca.uhn.fhir.jpa.dao.ISearchBuilder; 045import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao; 046import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao; 047import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 048import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 049import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 050import ca.uhn.fhir.jpa.model.config.PartitionSettings; 051import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 052import ca.uhn.fhir.jpa.model.dao.JpaPid; 053import ca.uhn.fhir.jpa.model.dao.JpaPidFk; 054import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 055import ca.uhn.fhir.jpa.model.entity.BaseTag; 056import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; 057import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk; 058import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag; 059import ca.uhn.fhir.jpa.model.entity.ResourceTag; 060import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 061import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 062import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 063import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc; 064import ca.uhn.fhir.jpa.search.SearchConstants; 065import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 066import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties; 067import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 068import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 069import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 070import 
ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 071import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 072import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 073import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 074import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 075import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 076import ca.uhn.fhir.jpa.util.BaseIterator; 077import ca.uhn.fhir.jpa.util.CartesianProductUtil; 078import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 079import ca.uhn.fhir.jpa.util.QueryChunker; 080import ca.uhn.fhir.jpa.util.SqlQueryList; 081import ca.uhn.fhir.model.api.IQueryParameterType; 082import ca.uhn.fhir.model.api.Include; 083import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 084import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 085import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 086import ca.uhn.fhir.rest.api.Constants; 087import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 088import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 089import ca.uhn.fhir.rest.api.SortOrderEnum; 090import ca.uhn.fhir.rest.api.SortSpec; 091import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 092import ca.uhn.fhir.rest.api.server.RequestDetails; 093import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 094import ca.uhn.fhir.rest.param.DateParam; 095import ca.uhn.fhir.rest.param.DateRangeParam; 096import ca.uhn.fhir.rest.param.ParamPrefixEnum; 097import ca.uhn.fhir.rest.param.ParameterUtil; 098import ca.uhn.fhir.rest.param.ReferenceParam; 099import ca.uhn.fhir.rest.param.StringParam; 100import ca.uhn.fhir.rest.param.TokenParam; 101import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 102import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 103import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 104import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 105import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 106import ca.uhn.fhir.system.HapiSystemProperties; 107import ca.uhn.fhir.util.StopWatch; 108import ca.uhn.fhir.util.StringUtil; 109import ca.uhn.fhir.util.UrlUtil; 110import com.google.common.annotations.VisibleForTesting; 111import com.google.common.collect.ListMultimap; 112import com.google.common.collect.Lists; 113import com.google.common.collect.MultimapBuilder; 114import com.healthmarketscience.sqlbuilder.Condition; 115import jakarta.annotation.Nonnull; 116import jakarta.annotation.Nullable; 117import jakarta.persistence.EntityManager; 118import jakarta.persistence.PersistenceContext; 119import jakarta.persistence.PersistenceContextType; 120import jakarta.persistence.Query; 121import jakarta.persistence.Tuple; 122import jakarta.persistence.TypedQuery; 123import jakarta.persistence.criteria.CriteriaBuilder; 124import org.apache.commons.collections4.ListUtils; 125import org.apache.commons.lang3.StringUtils; 126import org.apache.commons.lang3.Validate; 127import org.apache.commons.lang3.math.NumberUtils; 128import org.apache.commons.lang3.tuple.Pair; 129import org.hl7.fhir.instance.model.api.IAnyResource; 130import org.hl7.fhir.instance.model.api.IBaseResource; 131import org.hl7.fhir.instance.model.api.IIdType; 132import org.slf4j.Logger; 133import org.slf4j.LoggerFactory; 134import org.springframework.beans.factory.annotation.Autowired; 135import org.springframework.jdbc.core.JdbcTemplate; 136import org.springframework.transaction.support.TransactionSynchronizationManager; 137 138import java.util.ArrayList; 139import java.util.Collection; 140import 
java.util.Collections; 141import java.util.Comparator; 142import java.util.HashMap; 143import java.util.HashSet; 144import java.util.Iterator; 145import java.util.LinkedList; 146import java.util.List; 147import java.util.Map; 148import java.util.Objects; 149import java.util.Set; 150import java.util.stream.Collectors; 151 152import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 153import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 154import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 155import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 156import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL; 157import static java.util.Objects.requireNonNull; 158import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 159import static org.apache.commons.lang3.StringUtils.defaultString; 160import static org.apache.commons.lang3.StringUtils.isBlank; 161import static org.apache.commons.lang3.StringUtils.isNotBlank; 162import static org.apache.commons.lang3.StringUtils.stripStart; 163 164/** 165 * The SearchBuilder is responsible for actually forming the SQL query that handles 166 * searches for resources 167 */ 168public class SearchBuilder implements ISearchBuilder<JpaPid> { 169 170 /** 171 * See loadResourcesByPid 172 * for an explanation of why we use the constant 800 173 */ 174 // NB: keep public 175 @Deprecated 176 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 177 178 public static final String RESOURCE_ID_ALIAS = "resource_id"; 179 public static final String PARTITION_ID_ALIAS = "partition_id"; 180 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 181 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 182 private static final JpaPid NO_MORE = JpaPid.fromId(-1L); 183 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 184 private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue"; 185 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 186 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 187 private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId"; 188 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 189 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 190 public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0]; 191 public static boolean myUseMaxPageSize50ForTest = false; 192 public static Integer myMaxPageSizeForTests = null; 193 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 194 protected final IResourceTagDao myResourceTagDao; 195 private String myResourceName; 196 private final Class<? 
extends IBaseResource> myResourceType; 197 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 198 private final SqlObjectFactory mySqlBuilderFactory; 199 private final HibernatePropertiesProvider myDialectProvider; 200 private final ISearchParamRegistry mySearchParamRegistry; 201 private final PartitionSettings myPartitionSettings; 202 private final DaoRegistry myDaoRegistry; 203 private final FhirContext myContext; 204 private final IIdHelperService<JpaPid> myIdHelperService; 205 private final JpaStorageSettings myStorageSettings; 206 private final SearchQueryProperties mySearchProperties; 207 private final IResourceHistoryTableDao myResourceHistoryTableDao; 208 private final IJpaStorageResourceParser myJpaStorageResourceParser; 209 210 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 211 protected EntityManager myEntityManager; 212 213 private CriteriaBuilder myCriteriaBuilder; 214 private SearchParameterMap myParams; 215 private String mySearchUuid; 216 private int myFetchSize; 217 218 private boolean myRequiresTotal; 219 220 /** 221 * @see SearchBuilder#setDeduplicateInDatabase(boolean) 222 */ 223 private Set<JpaPid> myPidSet; 224 225 private boolean myHasNextIteratorQuery = false; 226 private RequestPartitionId myRequestPartitionId; 227 228 private IFulltextSearchSvc myFulltextSearchSvc; 229 230 @Autowired(required = false) 231 public void setFullTextSearch(IFulltextSearchSvc theFulltextSearchSvc) { 232 myFulltextSearchSvc = theFulltextSearchSvc; 233 } 234 235 @Autowired(required = false) 236 private IElasticsearchSvc myIElasticsearchSvc; 237 238 @Autowired 239 private IResourceHistoryTagDao myResourceHistoryTagDao; 240 241 @Autowired 242 private IRequestPartitionHelperSvc myPartitionHelperSvc; 243 244 /** 245 * Constructor 246 */ 247 @SuppressWarnings({"rawtypes", "unchecked"}) 248 public SearchBuilder( 249 String theResourceName, 250 JpaStorageSettings theStorageSettings, 251 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 252 SqlObjectFactory theSqlBuilderFactory, 253 HibernatePropertiesProvider theDialectProvider, 254 ISearchParamRegistry theSearchParamRegistry, 255 PartitionSettings thePartitionSettings, 256 IInterceptorBroadcaster theInterceptorBroadcaster, 257 IResourceTagDao theResourceTagDao, 258 DaoRegistry theDaoRegistry, 259 FhirContext theContext, 260 IIdHelperService theIdHelperService, 261 IResourceHistoryTableDao theResourceHistoryTagDao, 262 IJpaStorageResourceParser theIJpaStorageResourceParser, 263 Class<? 
extends IBaseResource> theResourceType) { 264 myResourceName = theResourceName; 265 myResourceType = theResourceType; 266 myStorageSettings = theStorageSettings; 267 268 myEntityManagerFactory = theEntityManagerFactory; 269 mySqlBuilderFactory = theSqlBuilderFactory; 270 myDialectProvider = theDialectProvider; 271 mySearchParamRegistry = theSearchParamRegistry; 272 myPartitionSettings = thePartitionSettings; 273 myInterceptorBroadcaster = theInterceptorBroadcaster; 274 myResourceTagDao = theResourceTagDao; 275 myDaoRegistry = theDaoRegistry; 276 myContext = theContext; 277 myIdHelperService = theIdHelperService; 278 myResourceHistoryTableDao = theResourceHistoryTagDao; 279 myJpaStorageResourceParser = theIJpaStorageResourceParser; 280 281 mySearchProperties = new SearchQueryProperties(); 282 } 283 284 @VisibleForTesting 285 void setResourceName(String theName) { 286 myResourceName = theName; 287 } 288 289 @Override 290 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 291 mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch); 292 } 293 294 @Override 295 public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) { 296 mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB); 297 } 298 299 @Override 300 public void setRequireTotal(boolean theRequireTotal) { 301 myRequiresTotal = theRequireTotal; 302 } 303 304 @Override 305 public boolean requiresTotal() { 306 return myRequiresTotal; 307 } 308 309 private void searchForIdsWithAndOr( 310 SearchQueryBuilder theSearchSqlBuilder, 311 QueryStack theQueryStack, 312 @Nonnull SearchParameterMap theParams, 313 RequestDetails theRequest) { 314 myParams = theParams; 315 mySearchProperties.setSortSpec(myParams.getSort()); 316 317 // Remove any empty parameters 318 theParams.clean(); 319 320 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 321 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 322 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 323 } 324 325 // Attempt to lookup via composite unique key. 
326 if (isCompositeUniqueSpCandidate()) { 327 attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest); 328 } 329 330 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 331 List<String> paramNames = myParams.keySet().stream() 332 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 333 .filter(t -> !t.equals(Constants.PARAM_TAG)) 334 .collect(Collectors.toList()); 335 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 336 paramNames.add(IAnyResource.SP_RES_ID); 337 } 338 if (myParams.containsKey(Constants.PARAM_TAG)) { 339 paramNames.add(Constants.PARAM_TAG); 340 } 341 342 // Handle each parameter 343 for (String nextParamName : paramNames) { 344 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 345 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 346 // Elasticsearch 347 continue; 348 } 349 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 350 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 351 .setParamName(nextParamName) 352 .setAndOrParams(andOrParams) 353 .setRequest(theRequest) 354 .setRequestPartitionId(myRequestPartitionId)); 355 if (predicate != null) { 356 theSearchSqlBuilder.addPredicate(predicate); 357 } 358 } 359 } 360 361 /** 362 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the 363 * parameters all have no modifiers. 364 */ 365 private boolean isCompositeUniqueSpCandidate() { 366 return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null; 367 } 368 369 @SuppressWarnings("ConstantConditions") 370 @Override 371 public Long createCountQuery( 372 SearchParameterMap theParams, 373 String theSearchUuid, 374 RequestDetails theRequest, 375 @Nonnull RequestPartitionId theRequestPartitionId) { 376 377 assert theRequestPartitionId != null; 378 assert TransactionSynchronizationManager.isActualTransactionActive(); 379 380 init(theParams, theSearchUuid, theRequestPartitionId); 381 382 if (checkUseHibernateSearch()) { 383 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 384 } 385 386 SearchQueryProperties properties = mySearchProperties.clone(); 387 properties.setDoCountOnlyFlag(true); 388 properties.setSortSpec(null); // counts don't require sorts 389 properties.setMaxResultsRequested(null); 390 properties.setOffset(null); 391 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null); 392 if (queries.isEmpty()) { 393 return 0L; 394 } else { 395 JpaPid jpaPid = queries.get(0).next(); 396 return jpaPid.getId(); 397 } 398 } 399 400 /** 401 * @param thePidSet May be null 402 */ 403 @Override 404 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 405 myPidSet = new HashSet<>(thePidSet); 406 } 407 408 @SuppressWarnings("ConstantConditions") 409 @Override 410 public IResultIterator<JpaPid> createQuery( 411 SearchParameterMap theParams, 412 SearchRuntimeDetails theSearchRuntimeDetails, 413 RequestDetails theRequest, 414 @Nonnull RequestPartitionId theRequestPartitionId) { 415 assert theRequestPartitionId != null; 416 assert TransactionSynchronizationManager.isActualTransactionActive(); 417 418 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 419 420 if (myPidSet == null) { 421 myPidSet = new HashSet<>(); 422 } 423 424 return new QueryIterator(theSearchRuntimeDetails, 
theRequest); 425 } 426 427 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 428 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 429 // we mutate the params. Make a private copy. 430 myParams = theParams.clone(); 431 mySearchProperties.setSortSpec(myParams.getSort()); 432 mySearchUuid = theSearchUuid; 433 myRequestPartitionId = theRequestPartitionId; 434 } 435 436 /** 437 * The query created can be either a count query or the 438 * actual query. 439 * This is why it takes a SearchQueryProperties object 440 * (and doesn't use the local version of it). 441 * The properties may differ slightly for whichever 442 * query this is. 443 */ 444 private List<ISearchQueryExecutor> createQuery( 445 SearchParameterMap theParams, 446 SearchQueryProperties theSearchProperties, 447 RequestDetails theRequest, 448 SearchRuntimeDetails theSearchRuntimeDetails) { 449 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 450 451 if (checkUseHibernateSearch()) { 452 // we're going to run at least part of the search against the Fulltext service. 453 454 // Ugh - we have two different return types for now 455 ISearchQueryExecutor fulltextExecutor = null; 456 List<JpaPid> fulltextMatchIds = null; 457 int resultCount = 0; 458 if (myParams.isLastN()) { 459 fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested()); 460 resultCount = fulltextMatchIds.size(); 461 } else if (myParams.getEverythingMode() != null) { 462 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 463 resultCount = fulltextMatchIds.size(); 464 } else { 465 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 466 // enabled SP indexing). 467 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 468 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 469 // extra work in Elastic. 470 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 471 472 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 473 // a page. 474 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 475 } 476 477 if (fulltextExecutor == null) { 478 fulltextExecutor = 479 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 480 } 481 482 if (theSearchRuntimeDetails != null) { 483 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 484 IInterceptorBroadcaster compositeBroadcaster = 485 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 486 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) { 487 HookParams params = new HookParams() 488 .add(RequestDetails.class, theRequest) 489 .addIfMatchesType(ServletRequestDetails.class, theRequest) 490 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 491 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params); 492 } 493 } 494 495 // can we skip the database entirely and return the pid list from here? 496 boolean canSkipDatabase = 497 // if we processed an AND clause, and it returned nothing, then nothing can match. 
498 !fulltextExecutor.hasNext() 499 || 500 // Our hibernate search query doesn't respect partitions yet 501 (!myPartitionSettings.isPartitioningEnabled() 502 && 503 // were there AND terms left? Then we still need the db. 504 theParams.isEmpty() 505 && 506 // not every param is a param. :-( 507 theParams.getNearDistanceParam() == null 508 && 509 // todo MB don't we support _lastUpdated and _offset now? 510 theParams.getLastUpdated() == null 511 && theParams.getEverythingMode() == null 512 && theParams.getOffset() == null); 513 514 if (canSkipDatabase) { 515 ourLog.trace("Query finished after HSearch. Skip db query phase"); 516 if (theSearchProperties.hasMaxResultsRequested()) { 517 fulltextExecutor = SearchQueryExecutors.limited( 518 fulltextExecutor, theSearchProperties.getMaxResultsRequested()); 519 } 520 queries.add(fulltextExecutor); 521 } else { 522 ourLog.trace("Query needs db after HSearch. Chunking."); 523 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 524 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 525 new QueryChunker<JpaPid>() 526 .chunk( 527 fulltextExecutor, 528 SearchBuilder.getMaximumPageSize(), 529 // for each list of (SearchBuilder.getMaximumPageSize()) 530 // we create a chunked query and add it to 'queries' 531 t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries)); 532 } 533 } else { 534 // do everything in the database. 535 createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries); 536 } 537 538 return queries; 539 } 540 541 /** 542 * Check to see if query should use Hibernate Search, and error if the query can't continue. 543 * 544 * @return true if the query should first be processed by Hibernate Search 545 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 546 */ 547 private boolean checkUseHibernateSearch() { 548 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 549 550 if (!fulltextEnabled) { 551 failIfUsed(Constants.PARAM_TEXT); 552 failIfUsed(Constants.PARAM_CONTENT); 553 } else { 554 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 555 final String paramName = sortSpec.getParamName(); 556 if (paramName.contains(".")) { 557 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 558 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 559 } 560 } 561 } 562 563 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 564 // can. 
565 return fulltextEnabled 566 && myParams != null 567 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 568 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 569 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 570 } 571 572 private void failIfUsed(String theParamName) { 573 if (myParams.containsKey(theParamName)) { 574 throw new InvalidRequestException(Msg.code(1192) 575 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 576 } 577 } 578 579 private void failIfUsedWithChainedSort(String theParamName) { 580 if (myParams.containsKey(theParamName)) { 581 throw new InvalidRequestException(Msg.code(2524) 582 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 583 + theParamName); 584 } 585 } 586 587 private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) { 588 // Can we use our hibernate search generated index on resource to support lastN?: 589 if (myStorageSettings.isAdvancedHSearchIndexing()) { 590 if (myFulltextSearchSvc == null) { 591 throw new InvalidRequestException(Msg.code(2027) 592 + "LastN operation is not enabled on this service, can not process this request"); 593 } 594 return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream() 595 .map(t -> (JpaPid) t) 596 .collect(Collectors.toList()); 597 } else { 598 throw new InvalidRequestException( 599 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 600 } 601 } 602 603 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 604 JpaPid pid = null; 605 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 606 String idParamValue; 607 IQueryParameterType idParam = 608 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 609 if (idParam instanceof TokenParam) { 610 TokenParam idParm = (TokenParam) idParam; 611 idParamValue = idParm.getValue(); 612 } else { 613 StringParam idParm = (StringParam) idParam; 614 idParamValue = idParm.getValue(); 615 } 616 617 pid = myIdHelperService 618 .resolveResourceIdentity( 619 myRequestPartitionId, 620 myResourceName, 621 idParamValue, 622 ResolveIdentityMode.includeDeleted().cacheOk()) 623 .getPersistentId(); 624 } 625 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 626 } 627 628 private void doCreateChunkedQueries( 629 SearchParameterMap theParams, 630 List<JpaPid> thePids, 631 SearchQueryProperties theSearchQueryProperties, 632 RequestDetails theRequest, 633 ArrayList<ISearchQueryExecutor> theQueries) { 634 635 if (thePids.size() < getMaximumPageSize()) { 636 thePids = normalizeIdListForInClause(thePids); 637 } 638 theSearchQueryProperties.setMaxResultsRequested(thePids.size()); 639 createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries); 640 } 641 642 /** 643 * Combs through the params for any _id parameters and extracts the PIDs for them 644 */ 645 private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) { 646 // get all the IQueryParameterType objects 647 // for _id -> these should all be StringParam values 648 HashSet<IIdType> ids = new HashSet<>(); 649 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 650 for (List<IQueryParameterType> paramList : params) { 651 for (IQueryParameterType param : paramList) { 652 String id; 653 if (param instanceof StringParam) { 654 // we expect all _id values to 
be StringParams 655 id = ((StringParam) param).getValue(); 656 } else if (param instanceof TokenParam) { 657 id = ((TokenParam) param).getValue(); 658 } else { 659 // we do not expect the _id parameter to be a non-string value 660 throw new IllegalArgumentException( 661 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 662 } 663 664 IIdType idType = myContext.getVersion().newIdType(); 665 if (id.contains("/")) { 666 idType.setValue(id); 667 } else { 668 idType.setValue(myResourceName + "/" + id); 669 } 670 ids.add(idType); 671 } 672 } 673 674 // fetch our target Pids 675 // this will throw if an id is not found 676 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 677 myRequestPartitionId, 678 new ArrayList<>(ids), 679 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 680 681 // add the pids to targetPids 682 for (IResourceLookup<JpaPid> pid : idToIdentity.values()) { 683 theTargetPids.add(pid.getPersistentId()); 684 } 685 } 686 687 private void createChunkedQuery( 688 SearchParameterMap theParams, 689 SearchQueryProperties theSearchProperties, 690 RequestDetails theRequest, 691 List<JpaPid> thePidList, 692 List<ISearchQueryExecutor> theSearchQueryExecutors) { 693 if (myParams.getEverythingMode() != null) { 694 createChunkedQueryForEverythingSearch( 695 theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors); 696 } else { 697 createChunkedQueryNormalSearch( 698 theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors); 699 } 700 } 701 702 private void createChunkedQueryNormalSearch( 703 SearchParameterMap theParams, 704 SearchQueryProperties theSearchProperties, 705 RequestDetails theRequest, 706 List<JpaPid> thePidList, 707 List<ISearchQueryExecutor> theSearchQueryExecutors) { 708 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 709 myContext, 710 myStorageSettings, 711 myPartitionSettings, 712 myRequestPartitionId, 713 myResourceName, 714 mySqlBuilderFactory, 715 myDialectProvider, 716 theSearchProperties.isDoCountOnlyFlag()); 717 QueryStack queryStack3 = new QueryStack( 718 theRequest, 719 theParams, 720 myStorageSettings, 721 myContext, 722 sqlBuilder, 723 mySearchParamRegistry, 724 myPartitionSettings); 725 726 if (theParams.keySet().size() > 1 727 || theParams.getSort() != null 728 || theParams.keySet().contains(Constants.PARAM_HAS) 729 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 730 List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 731 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 732 if (activeComboParams.isEmpty()) { 733 sqlBuilder.setNeedResourceTableRoot(true); 734 } 735 } 736 737 /* 738 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 739 * specific filters with ORs as their root from working around the natural resource type / deletion 740 * status / partition IDs built into queries. 
741 */ 742 if (theParams.containsKey(Constants.PARAM_FILTER)) { 743 Condition partitionIdPredicate = sqlBuilder 744 .getOrCreateResourceTablePredicateBuilder() 745 .createPartitionIdPredicate(myRequestPartitionId); 746 if (partitionIdPredicate != null) { 747 sqlBuilder.addPredicate(partitionIdPredicate); 748 } 749 } 750 751 // Normal search 752 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 753 754 // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the 755 // partition ID predicate in that case. 756 if (!sqlBuilder.haveAtLeastOnePredicate()) { 757 Condition partitionIdPredicate = sqlBuilder 758 .getOrCreateResourceTablePredicateBuilder() 759 .createPartitionIdPredicate(myRequestPartitionId); 760 if (partitionIdPredicate != null) { 761 sqlBuilder.addPredicate(partitionIdPredicate); 762 } 763 } 764 765 // Add PID list predicate for full text search and/or lastn operation 766 addPidListPredicate(thePidList, sqlBuilder); 767 768 // Last updated 769 addLastUpdatePredicate(sqlBuilder); 770 771 /* 772 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 773 * to something needed to guarantee correct results. 774 * 775 * Why do we need it? Suppose for example, a query like: 776 * Observation?category=foo,bar,baz 777 * And suppose you have many resources that have all 3 of these category codes. In this case 778 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 779 * we may exhaust the query results without getting enough distinct results back. When that 780 * happens we re-run the query with a larger limit. Excluding results we already know about 781 * tries to ensure that we get new unique results. 782 * 783 * The challenge with that though is that lots of DBs have an issue with too many 784 * parameters in one query. So we only do this optimization if there aren't too 785 * many results. 
786 */ 787 if (myHasNextIteratorQuery) { 788 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 789 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 790 } 791 } 792 793 /* 794 * If offset is present, we want to deduplicate the results by using GROUP BY; 795 * OR 796 * if the MaxResultsToFetch is null, we are requesting "everything", 797 * so we'll let the db do the deduplication (instead of in-memory) 798 */ 799 if (theSearchProperties.isDeduplicateInDatabase()) { 800 queryStack3.addGrouping(); 801 queryStack3.setUseAggregate(true); 802 } 803 804 /* 805 * Sort 806 * 807 * If we have a sort, we wrap the criteria search (the search that actually 808 * finds the appropriate resources) in an outer search which is then sorted 809 */ 810 if (theSearchProperties.hasSort()) { 811 assert !theSearchProperties.isDoCountOnlyFlag(); 812 813 createSort(queryStack3, theSearchProperties.getSortSpec(), theParams); 814 } 815 816 /* 817 * Now perform the search 818 */ 819 executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder); 820 } 821 822 private void executeSearch( 823 SearchQueryProperties theProperties, 824 List<ISearchQueryExecutor> theSearchQueryExecutors, 825 SearchQueryBuilder sqlBuilder) { 826 GeneratedSql generatedSql = 827 sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested()); 828 if (!generatedSql.isMatchNothing()) { 829 SearchQueryExecutor executor = 830 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested()); 831 theSearchQueryExecutors.add(executor); 832 } 833 } 834 835 private void createChunkedQueryForEverythingSearch( 836 RequestDetails theRequest, 837 SearchParameterMap theParams, 838 SearchQueryProperties theSearchQueryProperties, 839 List<JpaPid> thePidList, 840 List<ISearchQueryExecutor> theSearchQueryExecutors) { 841 842 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 843 myContext, 844 myStorageSettings, 845 myPartitionSettings, 846 myRequestPartitionId, 847 null, 848 mySqlBuilderFactory, 849 myDialectProvider, 850 theSearchQueryProperties.isDoCountOnlyFlag()); 851 852 QueryStack queryStack3 = new QueryStack( 853 theRequest, 854 theParams, 855 myStorageSettings, 856 myContext, 857 sqlBuilder, 858 mySearchParamRegistry, 859 myPartitionSettings); 860 861 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested()); 862 863 Set<JpaPid> targetPids = new HashSet<>(); 864 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 865 866 extractTargetPidsFromIdParams(targetPids); 867 868 // add the target pids to our executors as the first 869 // results iterator to go through 870 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 871 } else { 872 // For Everything queries, we make the query root by the ResourceLink table, since this query 873 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 874 // the one problem with this approach is that it doesn't catch Patients that have absolutely 875 // nothing linked to them. So we do one additional query to make sure we catch those too. 
876 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 877 myContext, 878 myStorageSettings, 879 myPartitionSettings, 880 myRequestPartitionId, 881 myResourceName, 882 mySqlBuilderFactory, 883 myDialectProvider, 884 theSearchQueryProperties.isDoCountOnlyFlag()); 885 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate( 886 theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested()); 887 String sql = allTargetsSql.getSql(); 888 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 889 890 List<JpaPid> output = 891 jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled())); 892 893 // we add a search executor to fetch unlinked patients first 894 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 895 } 896 897 List<String> typeSourceResources = new ArrayList<>(); 898 if (myParams.get(Constants.PARAM_TYPE) != null) { 899 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 900 } 901 902 queryStack3.addPredicateEverythingOperation( 903 myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY)); 904 905 // Add PID list predicate for full text search and/or lastn operation 906 addPidListPredicate(thePidList, sqlBuilder); 907 908 /* 909 * If offset is present, we want deduplicate the results by using GROUP BY 910 * ORDER BY is required to make sure we return unique results for each page 911 */ 912 if (theSearchQueryProperties.hasOffset()) { 913 queryStack3.addGrouping(); 914 queryStack3.addOrdering(); 915 queryStack3.setUseAggregate(true); 916 } 917 918 /* 919 * Now perform the search 920 */ 921 executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder); 922 } 923 924 private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) { 925 if (thePidList != null && !thePidList.isEmpty()) { 926 theSqlBuilder.addResourceIdsPredicate(thePidList); 927 } 928 } 929 930 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 931 DateRangeParam lu = myParams.getLastUpdated(); 932 if (lu != null && !lu.isEmpty()) { 933 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 934 theSqlBuilder.addPredicate(lastUpdatedPredicates); 935 } 936 } 937 938 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 939 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 940 jdbcTemplate.setFetchSize(myFetchSize); 941 if (theMaximumResults != null) { 942 jdbcTemplate.setMaxRows(theMaximumResults); 943 } 944 return jdbcTemplate; 945 } 946 947 private Collection<String> extractTypeSourceResourcesFromParams() { 948 949 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 950 951 // first off, let's flatten the list of list 952 List<IQueryParameterType> iQueryParameterTypesList = 953 listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); 954 955 // then, extract all elements of each CSV into one big list 956 List<String> resourceTypes = iQueryParameterTypesList.stream() 957 .map(param -> ((StringParam) param).getValue()) 958 .map(csvString -> List.of(csvString.split(","))) 959 .flatMap(List::stream) 960 .collect(Collectors.toList()); 961 962 Set<String> knownResourceTypes = myContext.getResourceTypes(); 963 964 // remove leading/trailing whitespaces if any and remove duplicates 965 Set<String> retVal = new HashSet<>(); 966 967 for (String type : resourceTypes) { 968 String trimmed = type.trim(); 969 if 
(!knownResourceTypes.contains(trimmed)) { 970 throw new ResourceNotFoundException( 971 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 972 } 973 retVal.add(trimmed); 974 } 975 976 return retVal; 977 } 978 979 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 980 return myStorageSettings.isIndexOnContainedResources() 981 && theParams.values().stream() 982 .flatMap(Collection::stream) 983 .flatMap(Collection::stream) 984 .anyMatch(ReferenceParam.class::isInstance); 985 } 986 987 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 988 if (theSort == null || isBlank(theSort.getParamName())) { 989 return; 990 } 991 992 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 993 994 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 995 996 theQueryStack.addSortOnResourceId(ascending); 997 998 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 999 1000 theQueryStack.addSortOnResourcePID(ascending); 1001 1002 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 1003 1004 theQueryStack.addSortOnLastUpdated(ascending); 1005 1006 } else { 1007 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 1008 myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1009 1010 /* 1011 * If we have a sort like _sort=subject.name and we have an 1012 * uplifted refchain for that combination we can do it more efficiently 1013 * by using the index associated with the uplifted refchain. In this case, 1014 * we need to find the actual target search parameter (corresponding 1015 * to "name" in this example) so that we know what datatype it is. 
1016 */ 1017 String paramName = theSort.getParamName(); 1018 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 1019 String[] chains = StringUtils.split(paramName, '.'); 1020 if (chains.length == 2) { 1021 1022 // Given: Encounter?_sort=Patient:subject.name 1023 String referenceParam = chains[0]; // subject 1024 String referenceParamTargetType = null; // Patient 1025 String targetParam = chains[1]; // name 1026 1027 int colonIdx = referenceParam.indexOf(':'); 1028 if (colonIdx > -1) { 1029 referenceParamTargetType = referenceParam.substring(0, colonIdx); 1030 referenceParam = referenceParam.substring(colonIdx + 1); 1031 } 1032 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 1033 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1034 if (outerParam == null) { 1035 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 1036 } else if (outerParam.hasUpliftRefchain(targetParam)) { 1037 for (String nextTargetType : outerParam.getTargets()) { 1038 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 1039 continue; 1040 } 1041 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 1042 nextTargetType, 1043 targetParam, 1044 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1045 if (innerParam != null) { 1046 param = innerParam; 1047 break; 1048 } 1049 } 1050 } 1051 } 1052 } 1053 1054 int colonIdx = paramName.indexOf(':'); 1055 String referenceTargetType = null; 1056 if (colonIdx > -1) { 1057 referenceTargetType = paramName.substring(0, colonIdx); 1058 paramName = paramName.substring(colonIdx + 1); 1059 } 1060 1061 int dotIdx = paramName.indexOf('.'); 1062 String chainName = null; 1063 if (param == null && dotIdx > -1) { 1064 chainName = paramName.substring(dotIdx + 1); 1065 paramName = paramName.substring(0, dotIdx); 1066 if (chainName.contains(".")) { 1067 String msg = myContext 1068 .getLocalizer() 1069 .getMessageSanitized( 1070 BaseStorageDao.class, 1071 "invalidSortParameterTooManyChains", 1072 paramName + "." 
+ chainName); 1073 throw new InvalidRequestException(Msg.code(2286) + msg); 1074 } 1075 } 1076 1077 if (param == null) { 1078 param = mySearchParamRegistry.getActiveSearchParam( 1079 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1080 } 1081 1082 if (param == null) { 1083 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1084 } 1085 1086 // param will never be null here (the above line throws if it does) 1087 // this is just to prevent the warning 1088 assert param != null; 1089 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1090 throw new InvalidRequestException( 1091 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1092 } 1093 1094 switch (param.getParamType()) { 1095 case STRING: 1096 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1097 break; 1098 case DATE: 1099 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1100 break; 1101 case REFERENCE: 1102 theQueryStack.addSortOnResourceLink( 1103 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1104 break; 1105 case TOKEN: 1106 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1107 break; 1108 case NUMBER: 1109 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1110 break; 1111 case URI: 1112 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1113 break; 1114 case QUANTITY: 1115 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1116 break; 1117 case COMPOSITE: 1118 List<RuntimeSearchParam> compositeList = 1119 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); 1120 if (compositeList == null) { 1121 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1122 + " is not defined by the resource " + myResourceName); 1123 } 1124 if (compositeList.size() != 2) { 1125 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1126 + " must have 2 composite types declared in parameter annotation, found " 1127 + compositeList.size()); 1128 } 1129 RuntimeSearchParam left = compositeList.get(0); 1130 RuntimeSearchParam right = compositeList.get(1); 1131 1132 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1133 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1134 1135 break; 1136 case SPECIAL: 1137 if (LOCATION_POSITION.equals(param.getPath())) { 1138 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1139 break; 1140 } 1141 throw new InvalidRequestException( 1142 Msg.code(2306) + "This server does not support _sort specifications of type " 1143 + param.getParamType() + " - Can't serve _sort=" + paramName); 1144 1145 case HAS: 1146 default: 1147 throw new InvalidRequestException( 1148 Msg.code(1197) + "This server does not support _sort specifications of type " 1149 + param.getParamType() + " - Can't serve _sort=" + paramName); 1150 } 1151 } 1152 1153 // Recurse 1154 createSort(theQueryStack, theSort.getChain(), theParams); 1155 } 1156 1157 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1158 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1159 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1160 String msg = myContext 
1161 .getLocalizer() 1162 .getMessageSanitized( 1163 BaseStorageDao.class, 1164 "invalidSortParameter", 1165 theParamName, 1166 theResourceName, 1167 validSearchParameterNames); 1168 throw new InvalidRequestException(Msg.code(1194) + msg); 1169 } 1170 1171 private void createCompositeSort( 1172 QueryStack theQueryStack, 1173 RestSearchParameterTypeEnum theParamType, 1174 String theParamName, 1175 boolean theAscending) { 1176 1177 switch (theParamType) { 1178 case STRING: 1179 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1180 break; 1181 case DATE: 1182 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1183 break; 1184 case TOKEN: 1185 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1186 break; 1187 case QUANTITY: 1188 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1189 break; 1190 case NUMBER: 1191 case REFERENCE: 1192 case COMPOSITE: 1193 case URI: 1194 case HAS: 1195 case SPECIAL: 1196 default: 1197 throw new InvalidRequestException( 1198 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1199 + " on _sort=" + theParamName); 1200 } 1201 } 1202 1203 private void doLoadPids( 1204 Collection<JpaPid> thePids, 1205 Collection<JpaPid> theIncludedPids, 1206 List<IBaseResource> theResourceListToPopulate, 1207 boolean theForHistoryOperation, 1208 Map<Long, Integer> thePosition) { 1209 1210 Map<JpaPid, Long> resourcePidToVersion = null; 1211 for (JpaPid next : thePids) { 1212 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1213 if (resourcePidToVersion == null) { 1214 resourcePidToVersion = new HashMap<>(); 1215 } 1216 resourcePidToVersion.put(next, next.getVersion()); 1217 } 1218 } 1219 1220 List<JpaPid> versionlessPids = new ArrayList<>(thePids); 1221 if (versionlessPids.size() < getMaximumPageSize()) { 1222 versionlessPids = normalizeIdListForInClause(versionlessPids); 1223 } 1224 1225 // Load the resource bodies 1226 List<ResourceHistoryTable> resourceSearchViewList = 1227 myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable( 1228 JpaPidFk.fromPids(versionlessPids)); 1229 1230 /* 1231 * If we have specific versions to load, replace the history entries with the 1232 * correct ones 1233 * 1234 * TODO: this could definitely be made more efficient, probably by not loading the wrong 1235 * version entity first, and by batching the fetches. But this is a fairly infrequently 1236 * used feature, and loading history entities by PK is a very efficient query so it's 1237 * not the end of the world 1238 */ 1239 if (resourcePidToVersion != null) { 1240 for (int i = 0; i < resourceSearchViewList.size(); i++) { 1241 ResourceHistoryTable next = resourceSearchViewList.get(i); 1242 JpaPid resourceId = next.getPersistentId(); 1243 Long version = resourcePidToVersion.get(resourceId); 1244 resourceId.setVersion(version); 1245 if (version != null && !version.equals(next.getVersion())) { 1246 ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion( 1247 next.getResourceId().toFk(), version); 1248 resourceSearchViewList.set(i, replacement); 1249 } 1250 } 1251 } 1252 1253 // -- preload all tags with tag definition if any 1254 Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1255 1256 for (ResourceHistoryTable next : resourceSearchViewList) { 1257 if (next.getDeleted() != null) { 1258 continue; 1259 } 1260 1261 Class<? 
extends IBaseResource> resourceType = 1262 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1263 1264 JpaPid resourceId = next.getPersistentId(); 1265 1266 if (resourcePidToVersion != null) { 1267 Long version = resourcePidToVersion.get(resourceId); 1268 resourceId.setVersion(version); 1269 } 1270 1271 IBaseResource resource = null; 1272 resource = myJpaStorageResourceParser.toResource( 1273 resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation); 1274 if (resource == null) { 1275 ourLog.warn( 1276 "Unable to find resource {}/{}/_history/{} in database", 1277 next.getResourceType(), 1278 next.getIdDt().getIdPart(), 1279 next.getVersion()); 1280 continue; 1281 } 1282 1283 Integer index = thePosition.get(resourceId.getId()); 1284 if (index == null) { 1285 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1286 continue; 1287 } 1288 1289 if (theIncludedPids.contains(resourceId)) { 1290 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1291 } else { 1292 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1293 } 1294 1295 // ensure there's enough space; "<=" because of 0-indexing 1296 while (theResourceListToPopulate.size() <= index) { 1297 theResourceListToPopulate.add(null); 1298 } 1299 theResourceListToPopulate.set(index, resource); 1300 } 1301 } 1302 1303 private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) { 1304 switch (myStorageSettings.getTagStorageMode()) { 1305 case VERSIONED: 1306 return getPidToTagMapVersioned(theHistoryTables); 1307 case NON_VERSIONED: 1308 return getPidToTagMapUnversioned(theHistoryTables); 1309 case INLINE: 1310 default: 1311 return Map.of(); 1312 } 1313 } 1314 1315 @Nonnull 1316 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned( 1317 Collection<ResourceHistoryTable> theHistoryTables) { 1318 List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size()); 1319 1320 // -- find all resource has tags 1321 for (ResourceHistoryTable resource : theHistoryTables) { 1322 if (resource.isHasTags()) { 1323 idList.add(resource.getId()); 1324 } 1325 } 1326 1327 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1328 1329 // -- no tags 1330 if (idList.isEmpty()) { 1331 return tagMap; 1332 } 1333 1334 // -- get all tags for the idList 1335 Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList); 1336 1337 // -- build the map, key = resourceId, value = list of ResourceTag 1338 JpaPid resourceId; 1339 Collection<BaseTag> tagCol; 1340 for (ResourceHistoryTag tag : tagList) { 1341 1342 resourceId = tag.getResourcePid(); 1343 tagCol = tagMap.get(resourceId); 1344 if (tagCol == null) { 1345 tagCol = new ArrayList<>(); 1346 tagCol.add(tag); 1347 tagMap.put(resourceId, tagCol); 1348 } else { 1349 tagCol.add(tag); 1350 } 1351 } 1352 1353 return tagMap; 1354 } 1355 1356 @Nonnull 1357 private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned( 1358 Collection<ResourceHistoryTable> theHistoryTables) { 1359 List<JpaPid> idList = new ArrayList<>(theHistoryTables.size()); 1360 1361 // -- find all resource has tags 1362 for (ResourceHistoryTable resource : theHistoryTables) { 1363 if (resource.isHasTags()) { 1364 idList.add(resource.getResourceId()); 1365 } 1366 } 1367 1368 Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>(); 1369 1370 // -- no tags 1371 if (idList.isEmpty()) { 1372 return tagMap; 1373 } 1374 
1375 // -- get all tags for the idList 1376 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList); 1377 1378 // -- build the map, key = resourceId, value = list of ResourceTag 1379 JpaPid resourceId; 1380 Collection<BaseTag> tagCol; 1381 for (ResourceTag tag : tagList) { 1382 1383 resourceId = tag.getResourceId(); 1384 tagCol = tagMap.get(resourceId); 1385 if (tagCol == null) { 1386 tagCol = new ArrayList<>(); 1387 tagCol.add(tag); 1388 tagMap.put(resourceId, tagCol); 1389 } else { 1390 tagCol.add(tag); 1391 } 1392 } 1393 1394 return tagMap; 1395 } 1396 1397 @Override 1398 public void loadResourcesByPid( 1399 Collection<JpaPid> thePids, 1400 Collection<JpaPid> theIncludedPids, 1401 List<IBaseResource> theResourceListToPopulate, 1402 boolean theForHistoryOperation, 1403 RequestDetails theDetails) { 1404 if (thePids.isEmpty()) { 1405 ourLog.debug("The include pids are empty"); 1406 } 1407 1408 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1409 // when running asserts 1410 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1411 1412 Map<Long, Integer> position = new HashMap<>(); 1413 int index = 0; 1414 for (JpaPid next : thePids) { 1415 position.put(next.getId(), index++); 1416 } 1417 1418 // Can we fast track this loading by checking elastic search? 1419 boolean isUsingElasticSearch = isLoadingFromElasticSearchSupported(thePids); 1420 if (isUsingElasticSearch) { 1421 try { 1422 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1423 return; 1424 1425 } catch (ResourceNotFoundInIndexException theE) { 1426 // some resources were not found in index, so we will inform this and resort to JPA search 1427 ourLog.warn( 1428 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1429 } 1430 } 1431 1432 // We only chunk because some jdbc drivers can't handle long param lists. 1433 QueryChunker.chunk( 1434 thePids, 1435 t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position)); 1436 } 1437 1438 /** 1439 * Check if we can load the resources from Hibernate Search instead of the database. 1440 * We assume this is faster. 1441 * <p> 1442 * Hibernate Search only stores the current version, and only if enabled. 1443 * 1444 * @param thePids the pids to check for versioned references 1445 * @return can we fetch from Hibernate Search? 1446 */ 1447 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1448 // is storage enabled? 1449 return myStorageSettings.isStoreResourceInHSearchIndex() 1450 && myStorageSettings.isAdvancedHSearchIndexing() 1451 && 1452 // we don't support history 1453 thePids.stream().noneMatch(p -> p.getVersion() != null) 1454 && 1455 // skip the complexity for metadata in dstu2 1456 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1457 } 1458 1459 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1460 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1461 // only impl 1462 // to handle lastN? 
1463 if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) { 1464 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1465 1466 return myFulltextSearchSvc.getResources(pidList); 1467 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1468 // legacy LastN implementation 1469 return myIElasticsearchSvc.getObservationResources(thePids); 1470 } else { 1471 return Collections.emptyList(); 1472 } 1473 } 1474 1475 /** 1476 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1477 * so it can't be Collections.emptySet() or some such thing. 1478 * The JpaPid returned will have resource type populated. 1479 */ 1480 @Override 1481 public Set<JpaPid> loadIncludes( 1482 FhirContext theContext, 1483 EntityManager theEntityManager, 1484 Collection<JpaPid> theMatches, 1485 Collection<Include> theIncludes, 1486 boolean theReverseMode, 1487 DateRangeParam theLastUpdated, 1488 String theSearchIdOrDescription, 1489 RequestDetails theRequest, 1490 Integer theMaxCount) { 1491 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1492 parameters.setFhirContext(theContext); 1493 parameters.setEntityManager(theEntityManager); 1494 parameters.setMatches(theMatches); 1495 parameters.setIncludeFilters(theIncludes); 1496 parameters.setReverseMode(theReverseMode); 1497 parameters.setLastUpdated(theLastUpdated); 1498 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1499 parameters.setRequestDetails(theRequest); 1500 parameters.setMaxCount(theMaxCount); 1501 return loadIncludes(parameters); 1502 } 1503 1504 @Override 1505 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1506 Collection<JpaPid> matches = theParameters.getMatches(); 1507 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1508 boolean reverseMode = theParameters.isReverseMode(); 1509 EntityManager entityManager = theParameters.getEntityManager(); 1510 Integer maxCount = theParameters.getMaxCount(); 1511 FhirContext fhirContext = theParameters.getFhirContext(); 1512 RequestDetails request = theParameters.getRequestDetails(); 1513 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1514 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1515 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1516 IInterceptorBroadcaster compositeBroadcaster = 1517 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1518 1519 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1520 CurrentThreadCaptureQueriesListener.startCapturing(); 1521 } 1522 if (matches.isEmpty()) { 1523 return new HashSet<>(); 1524 } 1525 if (currentIncludes == null || currentIncludes.isEmpty()) { 1526 return new HashSet<>(); 1527 } 1528 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1529 String searchPartitionIdFieldName = 1530 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1531 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1532 String findPartitionIdFieldName = 1533 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1534 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1535 String findVersionFieldName = null; 1536 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1537 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1538 } 1539 1540 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1541 HashSet<JpaPid> allAdded = new HashSet<>(); 1542 HashSet<JpaPid> original = new HashSet<>(matches); 1543 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1544 1545 int roundCounts = 0; 1546 StopWatch w = new StopWatch(); 1547 1548 boolean addedSomeThisRound; 1549 do { 1550 roundCounts++; 1551 1552 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1553 1554 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1555 Include nextInclude = iter.next(); 1556 if (!nextInclude.isRecurse()) { 1557 iter.remove(); 1558 } 1559 1560 // Account for _include=* 1561 boolean matchAll = "*".equals(nextInclude.getValue()); 1562 1563 // Account for _include=[resourceType]:* 1564 String wantResourceType = null; 1565 if (!matchAll) { 1566 if ("*".equals(nextInclude.getParamName())) { 1567 wantResourceType = nextInclude.getParamType(); 1568 matchAll = true; 1569 } 1570 } 1571 1572 if (matchAll) { 1573 loadIncludesMatchAll( 1574 findPidFieldName, 1575 findPartitionIdFieldName, 1576 findResourceTypeFieldName, 1577 findVersionFieldName, 1578 searchPidFieldName, 1579 searchPartitionIdFieldName, 1580 wantResourceType, 1581 reverseMode, 1582 hasDesiredResourceTypes, 1583 nextRoundMatches, 1584 entityManager, 1585 maxCount, 1586 desiredResourceTypes, 1587 pidsToInclude, 1588 request); 1589 } else { 1590 loadIncludesMatchSpecific( 1591 nextInclude, 1592 fhirContext, 1593 findPidFieldName, 1594 findPartitionIdFieldName, 1595 findVersionFieldName, 1596 searchPidFieldName, 1597 searchPartitionIdFieldName, 1598 reverseMode, 1599 nextRoundMatches, 1600 entityManager, 1601 maxCount, 1602 pidsToInclude, 1603 request); 1604 } 1605 } 1606 1607 nextRoundMatches.clear(); 1608 for (JpaPid next : pidsToInclude) { 1609 if (!original.contains(next) && !allAdded.contains(next)) { 1610 nextRoundMatches.add(next); 1611 } else { 1612 ourLog.trace("Skipping include since it has already been seen. [jpaPid={}]", next); 1613 } 1614 } 1615 1616 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1617 1618 if (maxCount != null && allAdded.size() >= maxCount) { 1619 break; 1620 } 1621 1622 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1623 1624 allAdded.removeAll(original); 1625 1626 ourLog.info( 1627 "Loaded {} {} in {} rounds and {} ms for search {}", 1628 allAdded.size(), 1629 reverseMode ? 
"_revincludes" : "_includes", 1630 roundCounts, 1631 w.getMillisAndRestart(), 1632 searchIdOrDescription); 1633 1634 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1635 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1636 } 1637 1638 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1639 // This can be used to remove results from the search result details before 1640 // the user has a chance to know that they were in the results 1641 if (!allAdded.isEmpty()) { 1642 1643 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1644 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1645 JpaPreResourceAccessDetails accessDetails = 1646 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1647 HookParams params = new HookParams() 1648 .add(IPreResourceAccessDetails.class, accessDetails) 1649 .add(RequestDetails.class, request) 1650 .addIfMatchesType(ServletRequestDetails.class, request); 1651 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1652 1653 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1654 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1655 JpaPid value = includedPidList.remove(i); 1656 if (value != null) { 1657 allAdded.remove(value); 1658 } 1659 } 1660 } 1661 } 1662 } 1663 1664 return allAdded; 1665 } 1666 1667 private void loadIncludesMatchSpecific( 1668 Include nextInclude, 1669 FhirContext fhirContext, 1670 String findPidFieldName, 1671 String findPartitionFieldName, 1672 String findVersionFieldName, 1673 String searchPidFieldName, 1674 String searchPartitionFieldName, 1675 boolean reverseMode, 1676 List<JpaPid> nextRoundMatches, 1677 EntityManager entityManager, 1678 Integer maxCount, 1679 HashSet<JpaPid> pidsToInclude, 1680 RequestDetails theRequest) { 1681 List<String> paths; 1682 1683 // Start replace 1684 RuntimeSearchParam param; 1685 String resType = nextInclude.getParamType(); 1686 if (isBlank(resType)) { 1687 return; 1688 } 1689 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1690 if (def == null) { 1691 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1692 return; 1693 } 1694 1695 String paramName = nextInclude.getParamName(); 1696 if (isNotBlank(paramName)) { 1697 param = mySearchParamRegistry.getActiveSearchParam( 1698 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1699 } else { 1700 param = null; 1701 } 1702 if (param == null) { 1703 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1704 return; 1705 } 1706 1707 paths = param.getPathsSplitForResourceType(resType); 1708 // end replace 1709 1710 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1711 1712 for (String nextPath : paths) { 1713 String findPidFieldSqlColumn = 1714 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1715 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1716 if (findVersionFieldName != null) { 1717 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1718 } 1719 if (myPartitionSettings.isDatabasePartitionMode()) { 1720 fieldsToLoad += ", r."; 1721 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1722 ? 
"partition_id" 1723 : "target_res_partition_id"; 1724 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1725 } 1726 1727 // Query for includes lookup has 2 cases 1728 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1729 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1730 // url in target_resource_url 1731 1732 // Case 1: 1733 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1734 1735 String searchPidFieldSqlColumn = 1736 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1737 StringBuilder localReferenceQuery = new StringBuilder(); 1738 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1739 localReferenceQuery.append(" FROM hfj_res_link r "); 1740 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1741 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1742 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1743 } 1744 localReferenceQuery 1745 .append(" AND r.") 1746 .append(searchPidFieldSqlColumn) 1747 .append(" IN (:target_pids) "); 1748 if (myPartitionSettings.isDatabasePartitionMode()) { 1749 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1750 ? "target_res_partition_id" 1751 : "partition_id"; 1752 localReferenceQuery 1753 .append("AND r.") 1754 .append(partitionFieldToSearch) 1755 .append(" = :search_partition_id "); 1756 } 1757 localReferenceQueryParams.put("src_path", nextPath); 1758 // we loop over target_pids later. 1759 if (targetResourceTypes != null) { 1760 if (targetResourceTypes.size() == 1) { 1761 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1762 localReferenceQueryParams.put( 1763 "target_resource_type", 1764 targetResourceTypes.iterator().next()); 1765 } else { 1766 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1767 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1768 } 1769 } 1770 1771 // Case 2: 1772 Pair<String, Map<String, Object>> canonicalQuery = 1773 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest); 1774 1775 String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1776 1777 Map<String, Object> limitParams = new HashMap<>(); 1778 if (maxCount != null) { 1779 LinkedList<Object> bindVariables = new LinkedList<>(); 1780 sql = SearchQueryBuilder.applyLimitToSql( 1781 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1782 1783 // The dialect SQL limiter uses positional params, but we're using 1784 // named params here, so we need to replace the positional params 1785 // with equivalent named ones 1786 StringBuilder sb = new StringBuilder(); 1787 for (int i = 0; i < sql.length(); i++) { 1788 char nextChar = sql.charAt(i); 1789 if (nextChar == '?') { 1790 String nextName = "limit" + i; 1791 sb.append(':').append(nextName); 1792 limitParams.put(nextName, bindVariables.removeFirst()); 1793 } else { 1794 sb.append(nextChar); 1795 } 1796 } 1797 sql = sb.toString(); 1798 } 1799 1800 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1801 for (Collection<JpaPid> nextPartition : partitions) { 1802 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1803 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1804 if (myPartitionSettings.isDatabasePartitionMode()) { 1805 
q.setParameter( 1806 "search_partition_id", 1807 nextPartition.iterator().next().getPartitionId()); 1808 } 1809 localReferenceQueryParams.forEach(q::setParameter); 1810 canonicalQuery.getRight().forEach(q::setParameter); 1811 limitParams.forEach(q::setParameter); 1812 1813 @SuppressWarnings("unchecked") 1814 List<Tuple> results = q.getResultList(); 1815 for (Tuple result : results) { 1816 if (result != null) { 1817 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1818 Long resourceVersion = null; 1819 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1820 resourceVersion = 1821 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1822 } 1823 Integer partitionId = null; 1824 if (myPartitionSettings.isDatabasePartitionMode()) { 1825 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1826 } 1827 1828 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1829 pid.setPartitionId(partitionId); 1830 pidsToInclude.add(pid); 1831 } 1832 } 1833 } 1834 } 1835 } 1836 1837 private void loadIncludesMatchAll( 1838 String findPidFieldName, 1839 String findPartitionFieldName, 1840 String findResourceTypeFieldName, 1841 String findVersionFieldName, 1842 String searchPidFieldName, 1843 String searchPartitionFieldName, 1844 String wantResourceType, 1845 boolean reverseMode, 1846 boolean hasDesiredResourceTypes, 1847 List<JpaPid> nextRoundMatches, 1848 EntityManager entityManager, 1849 Integer maxCount, 1850 List<String> desiredResourceTypes, 1851 HashSet<JpaPid> pidsToInclude, 1852 RequestDetails request) { 1853 StringBuilder sqlBuilder = new StringBuilder(); 1854 sqlBuilder.append("SELECT r.").append(findPidFieldName); 1855 sqlBuilder.append(", r.").append(findResourceTypeFieldName); 1856 sqlBuilder.append(", r.myTargetResourceUrl"); 1857 if (findVersionFieldName != null) { 1858 sqlBuilder.append(", r.").append(findVersionFieldName); 1859 } 1860 if (myPartitionSettings.isDatabasePartitionMode()) { 1861 sqlBuilder.append(", r.").append(findPartitionFieldName); 1862 } 1863 sqlBuilder.append(" FROM ResourceLink r WHERE "); 1864 1865 if (myPartitionSettings.isDatabasePartitionMode()) { 1866 sqlBuilder.append("r.").append(searchPartitionFieldName); 1867 sqlBuilder.append(" = :target_partition_id AND "); 1868 } 1869 1870 sqlBuilder.append("r.").append(searchPidFieldName); 1871 sqlBuilder.append(" IN (:target_pids)"); 1872 1873 /* 1874 * We need to set the resource type in 2 cases only: 1875 * 1) we are in $everything mode 1876 * (where we only want to fetch specific resource types, regardless of what is 1877 * available to fetch) 1878 * 2) we are doing revincludes 1879 * 1880 * Technically if the request is a qualified star (e.g. _include=Observation:*) we 1881 * should always be checking the source resource type on the resource link. We don't 1882 * actually index that column though by default, so in order to try and be efficient 1883 * we don't actually include it for includes (but we do for revincludes). This is 1884 * because for an include, it doesn't really make sense to include a different 1885 * resource type than the one you are searching on. 1886 */ 1887 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1888 // because mySourceResourceType is not part of the HFJ_RES_LINK 1889 // index, this might not be the most optimal performance. 
1890 // but it is for an $everything operation (and maybe we should update the index) 1891 sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type"); 1892 } else { 1893 wantResourceType = null; 1894 } 1895 1896 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1897 // resources 1898 // (e.g. via Provenance, List, or Group) when in an $everything operation 1899 if (myParams != null 1900 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1901 sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'"); 1902 sqlBuilder.append(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE.stream() 1903 .collect(Collectors.joining("', '", " AND r.mySourceResourceType NOT IN ('", "')"))); 1904 } 1905 if (hasDesiredResourceTypes) { 1906 sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)"); 1907 } 1908 1909 String sql = sqlBuilder.toString(); 1910 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1911 for (Collection<JpaPid> nextPartition : partitions) { 1912 TypedQuery<?> q = entityManager.createQuery(sql, Object[].class); 1913 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1914 if (myPartitionSettings.isDatabasePartitionMode()) { 1915 q.setParameter( 1916 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1917 } 1918 if (wantResourceType != null) { 1919 q.setParameter("want_resource_type", wantResourceType); 1920 } 1921 if (maxCount != null) { 1922 q.setMaxResults(maxCount); 1923 } 1924 if (hasDesiredResourceTypes) { 1925 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1926 } 1927 List<?> results = q.getResultList(); 1928 Set<String> canonicalUrls = null; 1929 for (Object nextRow : results) { 1930 if (nextRow == null) { 1931 // This can happen if there are outgoing references which are canonical or point to 1932 // other servers 1933 continue; 1934 } 1935 1936 Long version = null; 1937 Long resourceId = (Long) ((Object[]) nextRow)[0]; 1938 String resourceType = (String) ((Object[]) nextRow)[1]; 1939 String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; 1940 Integer partitionId = null; 1941 int offset = 0; 1942 if (findVersionFieldName != null) { 1943 version = (Long) ((Object[]) nextRow)[3]; 1944 offset++; 1945 } 1946 if (myPartitionSettings.isDatabasePartitionMode()) { 1947 partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]); 1948 } 1949 1950 if (resourceId != null) { 1951 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1952 pid.setPartitionId(partitionId); 1953 pidsToInclude.add(pid); 1954 } else if (resourceCanonicalUrl != null) { 1955 if (canonicalUrls == null) { 1956 canonicalUrls = new HashSet<>(); 1957 } 1958 canonicalUrls.add(resourceCanonicalUrl); 1959 } 1960 } 1961 1962 if (canonicalUrls != null) { 1963 String message = 1964 "Search with _include=* can be inefficient when references using canonical URLs are detected. 
								Use more specific _include values instead.";
				firePerformanceWarning(request, message);
				loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode);
			}
		}
	}

	private void loadCanonicalUrls(
			RequestDetails theRequestDetails,
			Set<String> theCanonicalUrls,
			EntityManager theEntityManager,
			HashSet<JpaPid> thePidsToInclude,
			boolean theReverse) {
		StringBuilder sqlBuilder;
		CanonicalUrlTargets canonicalUrlTargets =
				calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse);
		List<List<String>> canonicalUrlPartitions = ListUtils.partition(
				List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size());

		sqlBuilder = new StringBuilder();
		sqlBuilder.append("SELECT ");
		if (myPartitionSettings.isPartitioningEnabled()) {
			sqlBuilder.append("i.myPartitionIdValue, ");
		}
		sqlBuilder.append("i.myResourcePid ");

		sqlBuilder.append("FROM ResourceIndexedSearchParamUri i ");
		sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) ");
		sqlBuilder.append("AND i.myUri IN (:uris)");

		String canonicalResSql = sqlBuilder.toString();

		for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) {
			TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class);
			canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues);
			canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList);
			List<Object[]> results = canonicalResIdQuery.getResultList();
			for (var next : results) {
				if (next != null) {
					Integer partitionId = null;
					Long pid;
					if (next.length == 1) {
						pid = (Long) next[0];
					} else {
						partitionId = (Integer) ((Object[]) next)[0];
						pid = (Long) ((Object[]) next)[1];
					}
					if (pid != null) {
						thePidsToInclude.add(JpaPid.fromId(pid, partitionId));
					}
				}
			}
		}
	}

	/**
	 * Calls the performance trace hook: sends the raw SQL queries captured on the current
	 * thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut.
	 *
	 * @param request the request details
	 */
	private void callRawSqlHookWithCurrentThreadQueries(
			RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) {
		SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing();
		HookParams params = new HookParams()
				.add(RequestDetails.class, request)
				.addIfMatchesType(ServletRequestDetails.class, request)
				.add(SqlQueryList.class, capturedQueries);
		theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params);
	}

	@Nullable
	private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) {
		String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
		boolean haveTargetTypesDefinedByParam = param.hasTargets();
		Set<String> targetResourceTypes;
		if (targetResourceType != null) {
			targetResourceTypes = Set.of(targetResourceType);
		} else if (haveTargetTypesDefinedByParam) {
			targetResourceTypes = param.getTargets();
		} else {
			// all types!
2046 targetResourceTypes = null; 2047 } 2048 return targetResourceTypes; 2049 } 2050 2051 @Nonnull 2052 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2053 String theVersionFieldName, 2054 Set<String> theTargetResourceTypes, 2055 boolean theReverse, 2056 RequestDetails theRequest) { 2057 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2058 if (theVersionFieldName != null) { 2059 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2060 fieldsToLoadFromSpidxUriTable += ", NULL"; 2061 } 2062 2063 if (myPartitionSettings.isDatabasePartitionMode()) { 2064 if (theReverse) { 2065 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2066 } else { 2067 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2068 } 2069 } 2070 2071 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2072 // But sp_name isn't indexed, so we use hash_identity instead. 2073 CanonicalUrlTargets canonicalUrlTargets = 2074 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2075 2076 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2077 StringBuilder canonicalUrlQuery = new StringBuilder(); 2078 canonicalUrlQuery 2079 .append("SELECT ") 2080 .append(fieldsToLoadFromSpidxUriTable) 2081 .append(' '); 2082 canonicalUrlQuery.append("FROM hfj_res_link r "); 2083 2084 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2085 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2086 if (myPartitionSettings.isDatabasePartitionMode()) { 2087 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2088 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds); 2089 } 2090 if (canonicalUrlTargets.myHashIdentityValues.size() == 1) { 2091 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2092 canonicalUriQueryParams.put( 2093 "uri_identity_hash", 2094 canonicalUrlTargets.myHashIdentityValues.iterator().next()); 2095 } else { 2096 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2097 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues); 2098 } 2099 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2100 canonicalUrlQuery.append(")"); 2101 2102 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2103 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2104 canonicalUrlQuery.append(" AND"); 2105 if (myPartitionSettings.isDatabasePartitionMode()) { 2106 if (theReverse) { 2107 canonicalUrlQuery.append(" rUri.partition_id"); 2108 } else { 2109 canonicalUrlQuery.append(" r.partition_id"); 2110 } 2111 canonicalUrlQuery.append(" = :search_partition_id"); 2112 canonicalUrlQuery.append(" AND"); 2113 } 2114 if (theReverse) { 2115 canonicalUrlQuery.append(" rUri.res_id"); 2116 } else { 2117 canonicalUrlQuery.append(" r.src_resource_id"); 2118 } 2119 canonicalUrlQuery.append(" IN (:target_pids)"); 2120 2121 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2122 } 2123 2124 @Nonnull 2125 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2126 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2127 Set<String> targetResourceTypes = theTargetResourceTypes; 2128 if (targetResourceTypes == null) { 2129 /* 2130 * If we don't have a 
list of valid target types, we need to figure out a list of all 2131 * possible target types in order to perform the search of the URI index table. This is 2132 * because the hash_identity column encodes the resource type, so we'll need a hash 2133 * value for each possible target type. 2134 */ 2135 targetResourceTypes = new HashSet<>(); 2136 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2137 if (theReverse) { 2138 // For reverse includes, it is really hard to figure out what types 2139 // are actually potentially pointing to the type we're searching for 2140 // in this context, so let's just assume it could be anything. 2141 targetResourceTypes = possibleTypes; 2142 } else { 2143 for (var next : mySearchParamRegistry 2144 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2145 .values() 2146 .stream() 2147 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2148 .collect(Collectors.toList())) { 2149 2150 // If the reference points to a Reference (ie not a canonical or CanonicalReference) 2151 // then it doesn't matter here anyhow. The logic here only works for elements at the 2152 // root level of the document (e.g. QuestionnaireResponse.subject or 2153 // QuestionnaireResponse.subject.where(...)) but this is just an optimization 2154 // anyhow. 2155 if (next.getPath().startsWith(myResourceName + ".")) { 2156 String elementName = 2157 next.getPath().substring(next.getPath().indexOf('.') + 1); 2158 int secondDotIndex = elementName.indexOf('.'); 2159 if (secondDotIndex != -1) { 2160 elementName = elementName.substring(0, secondDotIndex); 2161 } 2162 BaseRuntimeChildDefinition child = 2163 myContext.getResourceDefinition(myResourceName).getChildByName(elementName); 2164 if (child != null) { 2165 BaseRuntimeElementDefinition<?> childDef = child.getChildByName(elementName); 2166 if (childDef != null) { 2167 if (childDef.getName().equals("Reference")) { 2168 continue; 2169 } 2170 } 2171 } 2172 } 2173 2174 if (!next.getTargets().isEmpty()) { 2175 // For each reference parameter on the resource type we're searching for, 2176 // add all the potential target types to the list of possible target 2177 // resource types we can look up. 
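						// For example (illustrative only): a reference parameter declared with targets
						// (Patient, Group) contributes those two types here, provided DAOs for them are registered.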
						for (var nextTarget : next.getTargets()) {
							if (possibleTypes.contains(nextTarget)) {
								targetResourceTypes.add(nextTarget);
							}
						}
					} else {
						// If we have any references that don't define any target types, then
						// we need to assume that all enabled resource types are possible target
						// types
						targetResourceTypes.addAll(possibleTypes);
						break;
					}
				}
			}
		}
		assert !targetResourceTypes.isEmpty();

		Set<Long> hashIdentityValues = new HashSet<>();
		Set<Integer> partitionIds = new HashSet<>();
		for (String type : targetResourceTypes) {

			RequestPartitionId readPartition;
			if (myPartitionSettings.isPartitioningEnabled()) {
				readPartition =
						myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type);
			} else {
				readPartition = RequestPartitionId.defaultPartition();
			}
			if (readPartition.hasPartitionIds()) {
				partitionIds.addAll(readPartition.getPartitionIds());
			}

			Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(
					myPartitionSettings, readPartition, type, "url");
			hashIdentityValues.add(hashIdentity);
		}

		return new CanonicalUrlTargets(hashIdentityValues, partitionIds);
	}

	static class CanonicalUrlTargets {

		@Nonnull
		final Set<Long> myHashIdentityValues;

		@Nonnull
		final Set<Integer> myPartitionIds;

		public CanonicalUrlTargets(@Nonnull Set<Long> theHashIdentityValues, @Nonnull Set<Integer> thePartitionIds) {
			myHashIdentityValues = theHashIdentityValues;
			myPartitionIds = thePartitionIds;
		}
	}

	/**
	 * This method takes in a list of {@link JpaPid}s and returns a series of sublists containing
	 * those pids where:
	 * <ul>
	 * <li>No single list has more than {@literal theMaxLoad} entries</li>
	 * <li>Each list only contains JpaPids with the same partition ID</li>
	 * </ul>
	 */
	static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) {

		if (theNextRoundMatches.size() <= theMaxLoad) {
			boolean allSamePartition = true;
			for (int i = 1; i < theNextRoundMatches.size(); i++) {
				if (!Objects.equals(
						theNextRoundMatches.get(i - 1).getPartitionId(),
						theNextRoundMatches.get(i).getPartitionId())) {
					allSamePartition = false;
					break;
				}
			}
			if (allSamePartition) {
				return Collections.singletonList(theNextRoundMatches);
			}
		}

		// Break into partitioned sublists
		ListMultimap<String, JpaPid> lists =
				MultimapBuilder.hashKeys().arrayListValues().build();
		for (JpaPid nextRoundMatch : theNextRoundMatches) {
			String partitionId = nextRoundMatch.getPartitionId() != null
					?
nextRoundMatch.getPartitionId().toString() 2263 : ""; 2264 lists.put(partitionId, nextRoundMatch); 2265 } 2266 2267 List<Collection<JpaPid>> retVal = new ArrayList<>(); 2268 for (String key : lists.keySet()) { 2269 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad); 2270 retVal.addAll(nextPartition); 2271 } 2272 2273 // In unit test mode, we sort the results just for unit test predictability 2274 if (HapiSystemProperties.isUnitTestModeEnabled()) { 2275 retVal = retVal.stream() 2276 .map(t -> t.stream().sorted().collect(Collectors.toList())) 2277 .collect(Collectors.toList()); 2278 } 2279 2280 return retVal; 2281 } 2282 2283 private void attemptComboUniqueSpProcessing( 2284 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 2285 RuntimeSearchParam comboParam = null; 2286 List<String> comboParamNames = null; 2287 List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams( 2288 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2289 if (!exactMatchParams.isEmpty()) { 2290 comboParam = exactMatchParams.get(0); 2291 comboParamNames = new ArrayList<>(theParams.keySet()); 2292 } 2293 2294 if (comboParam == null) { 2295 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 2296 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2297 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2298 List<String> nextCandidateParamNames = 2299 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream() 2300 .map(RuntimeSearchParam::getName) 2301 .collect(Collectors.toList()); 2302 if (theParams.keySet().containsAll(nextCandidateParamNames)) { 2303 comboParam = nextCandidate; 2304 comboParamNames = nextCandidateParamNames; 2305 break; 2306 } 2307 } 2308 } 2309 2310 if (comboParam != null) { 2311 Collections.sort(comboParamNames); 2312 2313 // Since we're going to remove elements below 2314 theParams.values().forEach(this::ensureSubListsAreWritable); 2315 2316 /* 2317 * Apply search against the combo param index in a loop: 2318 * 2319 * 1. First we check whether the actual parameter values in the 2320 * parameter map are actually usable for searching against the combo 2321 * param index. E.g. no search modifiers, date comparators, etc., 2322 * since these mean you can't use the combo index. 2323 * 2324 * 2. Apply and create the join SQl. We remove parameter values from 2325 * the map as we apply them, so any parameter values remaining in the 2326 * map after each loop haven't yet been factored into the SQL. 2327 * 2328 * The loop allows us to create multiple combo index joins if there 2329 * are multiple AND expressions for the related parameters. 
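			 *
			 * For example (illustrative only): with a combo parameter defined on (family, birthdate),
			 * a query like Patient?family=Simpson&birthdate=2024-02-01 is consumed in a single pass,
			 * while a query that ANDs several values for those parameters can yield one combo index
			 * join per pass until one of the parameter value lists is exhausted.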
2330 */ 2331 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2332 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2333 } 2334 } 2335 } 2336 2337 private void applyComboSearchParam( 2338 QueryStack theQueryStack, 2339 @Nonnull SearchParameterMap theParams, 2340 RequestDetails theRequest, 2341 List<String> theComboParamNames, 2342 RuntimeSearchParam theComboParam) { 2343 2344 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2345 for (String nextParamName : theComboParamNames) { 2346 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2347 inputs.add(nextValues); 2348 } 2349 2350 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2351 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2352 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2353 2354 StringBuilder searchStringBuilder = new StringBuilder(); 2355 searchStringBuilder.append(myResourceName); 2356 searchStringBuilder.append("?"); 2357 2358 boolean first = true; 2359 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2360 2361 String nextParamName = theComboParamNames.get(paramIndex); 2362 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2363 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2364 // As a result, we strip the prefix if present. 2365 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2366 2367 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2368 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2369 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2370 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2371 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2372 } 2373 } 2374 2375 if (first) { 2376 first = false; 2377 } else { 2378 searchStringBuilder.append('&'); 2379 } 2380 2381 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2382 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2383 2384 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2385 } 2386 2387 String indexString = searchStringBuilder.toString(); 2388 ourLog.debug( 2389 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2390 2391 indexStrings.add(indexString); 2392 } 2393 2394 // Just to make sure we're stable for tests 2395 indexStrings.sort(Comparator.naturalOrder()); 2396 2397 // Interceptor broadcast: JPA_PERFTRACE_INFO 2398 IInterceptorBroadcaster compositeBroadcaster = 2399 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2400 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2401 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2402 StorageProcessingMessage msg = new StorageProcessingMessage() 2403 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2404 + indexStringForLog); 2405 HookParams params = new HookParams() 2406 .add(RequestDetails.class, theRequest) 2407 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2408 .add(StorageProcessingMessage.class, msg); 2409 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2410 } 2411 2412 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2413 case UNIQUE: 2414 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2415 break; 2416 case NON_UNIQUE: 2417 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2418 break; 2419 } 2420 2421 // Remove any empty parameters remaining after this 2422 theParams.clean(); 2423 } 2424 2425 /** 2426 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2427 * searching against a combo param with the given parameter names. This might be {@literal false} if 2428 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2429 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2430 */ 2431 private boolean validateParamValuesAreValidForComboParam( 2432 RequestDetails theRequest, 2433 @Nonnull SearchParameterMap theParams, 2434 List<String> theComboParamNames, 2435 RuntimeSearchParam theComboParam) { 2436 boolean paramValuesAreValidForCombo = true; 2437 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2438 2439 for (String nextParamName : theComboParamNames) { 2440 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2441 2442 if (nextValues == null || nextValues.isEmpty()) { 2443 paramValuesAreValidForCombo = false; 2444 break; 2445 } 2446 2447 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2448 paramOrValues.add(nextAndValue); 2449 2450 for (IQueryParameterType nextOrValue : nextAndValue) { 2451 if (nextOrValue instanceof DateParam) { 2452 DateParam dateParam = (DateParam) nextOrValue; 2453 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2454 String message = "Search with params " + theComboParamNames 2455 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2456 + nextParamName + "'"; 2457 firePerformanceInfo(theRequest, message); 2458 paramValuesAreValidForCombo = false; 2459 break; 2460 } 2461 } 2462 if (nextOrValue instanceof BaseParamWithPrefix) { 2463 BaseParamWithPrefix<?> paramWithPrefix = (BaseParamWithPrefix<?>) nextOrValue; 2464 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2465 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2466 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
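						// For example (illustrative only): birthdate=gt2025-01-01 is no longer an exact match,
						// so it disqualifies the combo index and is rejected just below.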
2467 if (prefix != null && prefix != EQUAL) { 2468 String message = "Search with params " + theComboParamNames 2469 + " is not a candidate for combo searching - Parameter '" + nextParamName 2470 + "' has prefix: '" 2471 + paramWithPrefix.getPrefix().getValue() + "'"; 2472 firePerformanceInfo(theRequest, message); 2473 paramValuesAreValidForCombo = false; 2474 break; 2475 } 2476 } 2477 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2478 String message = "Search with params " + theComboParamNames 2479 + " is not a candidate for combo searching - Parameter '" + nextParamName 2480 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2481 firePerformanceInfo(theRequest, message); 2482 paramValuesAreValidForCombo = false; 2483 break; 2484 } 2485 } 2486 2487 // Reference params are only eligible for using a composite index if they 2488 // are qualified 2489 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2490 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2491 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2492 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2493 if (isBlank(param.getResourceType())) { 2494 ourLog.debug( 2495 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2496 paramValuesAreValidForCombo = false; 2497 break; 2498 } 2499 } 2500 2501 // Date params are not eligible for using composite unique index 2502 // as index could contain date with different precision (e.g. DAY, SECOND) 2503 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2504 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2505 ourLog.debug( 2506 "Search with params {} is not a candidate for combo searching - " 2507 + "Unique combo search parameter '{}' has DATE type", 2508 theComboParamNames, 2509 nextParamName); 2510 paramValuesAreValidForCombo = false; 2511 break; 2512 } 2513 } 2514 2515 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2516 ourLog.debug( 2517 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2518 paramValuesAreValidForCombo = false; 2519 } 2520 2521 return paramValuesAreValidForCombo; 2522 } 2523 2524 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2525 for (int i = 0; i < theListOfLists.size(); i++) { 2526 List<T> oldSubList = theListOfLists.get(i); 2527 if (!(oldSubList instanceof ArrayList)) { 2528 List<T> newSubList = new ArrayList<>(oldSubList); 2529 theListOfLists.set(i, newSubList); 2530 } 2531 } 2532 } 2533 2534 @Override 2535 public void setFetchSize(int theFetchSize) { 2536 myFetchSize = theFetchSize; 2537 } 2538 2539 public SearchParameterMap getParams() { 2540 return myParams; 2541 } 2542 2543 public CriteriaBuilder getBuilder() { 2544 return myCriteriaBuilder; 2545 } 2546 2547 public Class<? 
extends IBaseResource> getResourceType() { 2548 return myResourceType; 2549 } 2550 2551 public String getResourceName() { 2552 return myResourceName; 2553 } 2554 2555 /** 2556 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2557 */ 2558 public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2559 2560 private final RequestDetails myRequest; 2561 private final Set<JpaPid> myCurrentPids; 2562 private Iterator<JpaPid> myCurrentIterator; 2563 private JpaPid myNext; 2564 2565 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2566 myCurrentPids = new HashSet<>(thePidSet); 2567 myCurrentIterator = null; 2568 myRequest = theRequest; 2569 } 2570 2571 private void fetchNext() { 2572 while (myNext == null) { 2573 2574 if (myCurrentIterator == null) { 2575 Set<Include> includes = new HashSet<>(); 2576 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2577 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2578 for (IQueryParameterType type : typeList) { 2579 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2580 for (String resourceType : queryString.split(",")) { 2581 String rt = resourceType.trim(); 2582 if (isNotBlank(rt)) { 2583 includes.add(new Include(rt + ":*", true)); 2584 } 2585 } 2586 } 2587 } 2588 } 2589 if (includes.isEmpty()) { 2590 includes.add(new Include("*", true)); 2591 } 2592 Set<JpaPid> newPids = loadIncludes( 2593 myContext, 2594 myEntityManager, 2595 myCurrentPids, 2596 includes, 2597 false, 2598 getParams().getLastUpdated(), 2599 mySearchUuid, 2600 myRequest, 2601 null); 2602 myCurrentIterator = newPids.iterator(); 2603 } 2604 2605 if (myCurrentIterator.hasNext()) { 2606 myNext = myCurrentIterator.next(); 2607 } else { 2608 myNext = NO_MORE; 2609 } 2610 } 2611 } 2612 2613 @Override 2614 public boolean hasNext() { 2615 fetchNext(); 2616 return !NO_MORE.equals(myNext); 2617 } 2618 2619 @Override 2620 public JpaPid next() { 2621 fetchNext(); 2622 JpaPid retVal = myNext; 2623 myNext = null; 2624 return retVal; 2625 } 2626 } 2627 2628 /** 2629 * Basic Query iterator, used to fetch the results of a query. 2630 */ 2631 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2632 2633 private final SearchRuntimeDetails mySearchRuntimeDetails; 2634 private final RequestDetails myRequest; 2635 private final boolean myHaveRawSqlHooks; 2636 private final boolean myHavePerfTraceFoundIdHook; 2637 private final SortSpec mySort; 2638 private final Integer myOffset; 2639 private final IInterceptorBroadcaster myCompositeBroadcaster; 2640 private boolean myFirst = true; 2641 private IncludesIterator myIncludesIterator; 2642 /** 2643 * The next JpaPid value of the next result in this query. 2644 * Will not be null if fetched using getNext() 2645 */ 2646 private JpaPid myNext; 2647 /** 2648 * The current query result iterator running sql and supplying PIDs 2649 * @see #myQueryList 2650 */ 2651 private ISearchQueryExecutor myResultsIterator; 2652 2653 private boolean myFetchIncludesForEverythingOperation; 2654 /** 2655 * The count of resources skipped because they were seen in earlier results 2656 */ 2657 private int mySkipCount = 0; 2658 /** 2659 * The count of resources that are new in this search 2660 * (ie, not cached in previous searches) 2661 */ 2662 private int myNonSkipCount = 0; 2663 2664 /** 2665 * The list of queries to use to find all results. 
		 * Normal JPA queries will normally have a single entry.
		 * Queries that involve Hibernate Search/Elasticsearch may have
		 * multiple queries because of chunking.
		 * The $everything operation also injects some extra results of its own.
		 */
		private List<ISearchQueryExecutor> myQueryList = new ArrayList<>();

		private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
			mySearchRuntimeDetails = theSearchRuntimeDetails;
			mySort = myParams.getSort();
			myOffset = myParams.getOffset();
			myRequest = theRequest;
			myCompositeBroadcaster =
					CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);

			// $everything requires recursively fetching all related resources
			if (myParams.getEverythingMode() != null) {
				myFetchIncludesForEverythingOperation = true;
			}

			myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID);
			myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL);
		}

		private void fetchNext() {
			try {
				if (myHaveRawSqlHooks) {
					CurrentThreadCaptureQueriesListener.startCapturing();
				}

				// If we don't have a query yet, create one
				if (myResultsIterator == null) {
					if (!mySearchProperties.hasMaxResultsRequested()) {
						mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch());
					}

					/*
					 * assigns the results iterator
					 * and populates the myQueryList.
					 */
					initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested());
				}

				if (myNext == null) {
					// no next means we need a new query (if one is available)
					while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
						/*
						 * Because we combine our DB searches with Lucene
						 * sometimes we can have multiple results iterators
						 * (with only some having data in them to extract).
						 *
						 * We'll iterate our results iterators until we
						 * either run out of results iterators, or we
						 * have one that actually has data in it.
						 */
						while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) {
							retrieveNextIteratorQuery();
						}

						if (!myResultsIterator.hasNext()) {
							// we couldn't find a results iterator;
							// we're done here
							break;
						}

						JpaPid nextPid = myResultsIterator.next();
						if (myHavePerfTraceFoundIdHook) {
							callPerformanceTracingHook(nextPid);
						}

						if (nextPid != null) {
							if (!myPidSet.contains(nextPid)) {
								if (!mySearchProperties.isDeduplicateInDatabase()) {
									/*
									 * We only add to the map if we aren't fetching "everything";
									 * otherwise, we let the de-duplication happen in the database
									 * (see createChunkedQueryNormalSearch above), because it
									 * saves memory that way.
2744 */ 2745 myPidSet.add(nextPid); 2746 } 2747 if (doNotSkipNextPidForEverything()) { 2748 myNext = nextPid; 2749 myNonSkipCount++; 2750 break; 2751 } 2752 } else { 2753 mySkipCount++; 2754 } 2755 } 2756 2757 if (!myResultsIterator.hasNext()) { 2758 if (mySearchProperties.hasMaxResultsRequested() 2759 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2760 if (mySkipCount > 0 && myNonSkipCount == 0) { 2761 sendProcessingMsgAndFirePerformanceHook(); 2762 // need the next iterator; increase the maxsize 2763 // (we should always do this) 2764 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2765 mySearchProperties.setMaxResultsRequested(maxResults); 2766 2767 if (!mySearchProperties.isDeduplicateInDatabase()) { 2768 // if we're not using the database to deduplicate 2769 // we should recheck our memory usage 2770 // the prefetch size check is future proofing 2771 int prefetchSize = myStorageSettings 2772 .getSearchPreFetchThresholds() 2773 .size(); 2774 if (prefetchSize > 0) { 2775 if (myStorageSettings 2776 .getSearchPreFetchThresholds() 2777 .get(prefetchSize - 1) 2778 < mySearchProperties.getMaxResultsRequested()) { 2779 mySearchProperties.setDeduplicateInDatabase(true); 2780 } 2781 } 2782 } 2783 2784 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2785 } 2786 } 2787 } 2788 } 2789 } 2790 2791 if (myNext == null) { 2792 // if we got here, it means the current JpaPid has already been processed, 2793 // and we will decide (here) if we need to fetch related resources recursively 2794 if (myFetchIncludesForEverythingOperation) { 2795 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2796 myFetchIncludesForEverythingOperation = false; 2797 } 2798 if (myIncludesIterator != null) { 2799 while (myIncludesIterator.hasNext()) { 2800 JpaPid next = myIncludesIterator.next(); 2801 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2802 myNext = next; 2803 break; 2804 } 2805 } 2806 if (myNext == null) { 2807 myNext = NO_MORE; 2808 } 2809 } else { 2810 myNext = NO_MORE; 2811 } 2812 } 2813 2814 if (!mySearchProperties.hasMaxResultsRequested()) { 2815 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2816 } else { 2817 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2818 } 2819 2820 } finally { 2821 // search finished - fire hooks 2822 if (myHaveRawSqlHooks) { 2823 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2824 } 2825 } 2826 2827 if (myFirst) { 2828 HookParams params = new HookParams() 2829 .add(RequestDetails.class, myRequest) 2830 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2831 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2832 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2833 myFirst = false; 2834 } 2835 2836 if (NO_MORE.equals(myNext)) { 2837 HookParams params = new HookParams() 2838 .add(RequestDetails.class, myRequest) 2839 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2840 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2841 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2842 } 2843 } 2844 2845 private Integer calculateMaxResultsToFetch() { 2846 if (myParams.getLoadSynchronousUpTo() != null) { 2847 return myParams.getLoadSynchronousUpTo(); 2848 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2849 return myParams.getEverythingMode() != null 2850 ? 
myParams.getOffset() + myParams.getCount() 2851 : myParams.getCount(); 2852 } else { 2853 return myStorageSettings.getFetchSizeDefaultMaximum(); 2854 } 2855 } 2856 2857 private boolean doNotSkipNextPidForEverything() { 2858 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2859 } 2860 2861 private void callPerformanceTracingHook(JpaPid theNextPid) { 2862 HookParams params = new HookParams() 2863 .add(Integer.class, System.identityHashCode(this)) 2864 .add(Object.class, theNextPid); 2865 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2866 } 2867 2868 private void sendProcessingMsgAndFirePerformanceHook() { 2869 String msg = "Pass completed with no matching results seeking rows " 2870 + myPidSet.size() + "-" + mySkipCount 2871 + ". This indicates an inefficient query! Retrying with new max count of " 2872 + mySearchProperties.getMaxResultsRequested(); 2873 firePerformanceWarning(myRequest, msg); 2874 } 2875 2876 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2877 Integer offset = theOffset; 2878 if (myQueryList.isEmpty()) { 2879 // Capture times for Lucene/Elasticsearch queries as well 2880 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2881 2882 // setting offset to 0 to fetch all resource ids to guarantee 2883 // correct output result for everything operation during paging 2884 if (myParams.getEverythingMode() != null) { 2885 offset = 0; 2886 } 2887 2888 SearchQueryProperties properties = mySearchProperties.clone(); 2889 properties 2890 .setOffset(offset) 2891 .setMaxResultsRequested(theMaxResultsToFetch) 2892 .setDoCountOnlyFlag(false) 2893 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2894 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2895 } 2896 2897 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2898 2899 retrieveNextIteratorQuery(); 2900 2901 mySkipCount = 0; 2902 myNonSkipCount = 0; 2903 } 2904 2905 private void retrieveNextIteratorQuery() { 2906 close(); 2907 if (isNotEmpty(myQueryList)) { 2908 myResultsIterator = myQueryList.remove(0); 2909 myHasNextIteratorQuery = true; 2910 } else { 2911 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2912 myHasNextIteratorQuery = false; 2913 } 2914 } 2915 2916 @Override 2917 public boolean hasNext() { 2918 if (myNext == null) { 2919 fetchNext(); 2920 } 2921 return !NO_MORE.equals(myNext); 2922 } 2923 2924 @Override 2925 public JpaPid next() { 2926 fetchNext(); 2927 JpaPid retVal = myNext; 2928 myNext = null; 2929 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2930 return retVal; 2931 } 2932 2933 @Override 2934 public int getSkippedCount() { 2935 return mySkipCount; 2936 } 2937 2938 @Override 2939 public int getNonSkippedCount() { 2940 return myNonSkipCount; 2941 } 2942 2943 @Override 2944 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2945 Collection<JpaPid> batch = new ArrayList<>(); 2946 while (this.hasNext() && batch.size() < theBatchSize) { 2947 batch.add(this.next()); 2948 } 2949 return batch; 2950 } 2951 2952 @Override 2953 public void close() { 2954 if (myResultsIterator != null) { 2955 myResultsIterator.close(); 2956 } 2957 myResultsIterator = null; 2958 } 2959 } 2960 2961 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2962 // Only log at debug level since these messages aren't considered important enough 2963 // that we should be 
		// cluttering the system log, but they are important to the
		// specific query being executed, so we'll log them at INFO level there
		ourLog.debug(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO);
	}

	private void firePerformanceWarning(RequestDetails theRequest, String theMessage) {
		ourLog.warn(theMessage);
		firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING);
	}

	private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) {
		IInterceptorBroadcaster compositeBroadcaster =
				CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
		if (compositeBroadcaster.hasHooks(thePointcut)) {
			StorageProcessingMessage message = new StorageProcessingMessage();
			message.setMessage(theMessage);
			HookParams params = new HookParams()
					.add(RequestDetails.class, theRequest)
					.addIfMatchesType(ServletRequestDetails.class, theRequest)
					.add(StorageProcessingMessage.class, message);
			compositeBroadcaster.callHooks(thePointcut, params);
		}
	}

	public static int getMaximumPageSize() {
		if (myMaxPageSizeForTests != null) {
			return myMaxPageSizeForTests;
		}
		return MAXIMUM_PAGE_SIZE;
	}

	public static void setMaxPageSizeForTest(Integer theTestSize) {
		myMaxPageSizeForTests = theTestSize;
	}
}