/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2025 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ca.uhn.fhir.jpa.search.builder;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.ComboSearchParamType;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.svc.IIdHelperService;
import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseStorageDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser;
import ca.uhn.fhir.jpa.dao.IResultIterator;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTagDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.dao.JpaPidFk;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.BaseTag;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTablePk;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTag;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.search.SearchConstants;
import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.models.SearchQueryProperties;
import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory;
import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
import ca.uhn.fhir.jpa.util.BaseIterator;
import ca.uhn.fhir.jpa.util.CartesianProductUtil;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.jpa.util.SqlQueryList;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.BaseParamWithPrefix;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.system.HapiSystemProperties;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.StringUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.healthmarketscience.sqlbuilder.Condition;
import jakarta.annotation.Nonnull;
import jakarta.annotation.Nullable;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.PersistenceContextType;
import jakarta.persistence.Query;
import jakarta.persistence.Tuple;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION;
import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with;
import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause;
import static ca.uhn.fhir.rest.param.ParamPrefixEnum.EQUAL;
import static java.util.Objects.requireNonNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.apache.commons.lang3.StringUtils.stripStart;

/**
 * The SearchBuilder is responsible for actually forming the SQL query that handles
 * searches for resources
 */
public class SearchBuilder implements ISearchBuilder<JpaPid> {

	/**
	 * See loadResourcesByPid
	 * for an explanation of why we use the constant 800
	 */
	// NB: keep public
	@Deprecated
	public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE;

	public static final String RESOURCE_ID_ALIAS = "resource_id";
	public static final String PARTITION_ID_ALIAS = "partition_id";
	public static final String RESOURCE_VERSION_ALIAS = "resource_version";
	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
	private static final JpaPid NO_MORE = JpaPid.fromId(-1L);
	private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid";
	private static final String MY_SOURCE_RESOURCE_PARTITION_ID = "myPartitionIdValue";
	private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType";
	private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid";
	private static final String MY_TARGET_RESOURCE_PARTITION_ID = "myTargetResourcePartitionId";
	private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType";
	private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion";
	public static final JpaPid[] EMPTY_JPA_PID_ARRAY = new JpaPid[0];
	public static boolean myUseMaxPageSize50ForTest = false;
	public static Integer myMaxPageSizeForTests = null;
	protected final IInterceptorBroadcaster myInterceptorBroadcaster;
	protected final IResourceTagDao myResourceTagDao;
	private String myResourceName;
	private final Class<? extends IBaseResource> myResourceType;
	private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory;
	private final SqlObjectFactory mySqlBuilderFactory;
	private final HibernatePropertiesProvider myDialectProvider;
	private final ISearchParamRegistry mySearchParamRegistry;
	private final PartitionSettings myPartitionSettings;
	private final DaoRegistry myDaoRegistry;
	private final FhirContext myContext;
	private final IIdHelperService<JpaPid> myIdHelperService;
	private final JpaStorageSettings myStorageSettings;

	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
	protected EntityManager myEntityManager;

	private CriteriaBuilder myCriteriaBuilder;
	private SearchParameterMap myParams;
	private String mySearchUuid;
	private int myFetchSize;

	private boolean myRequiresTotal;

	/**
	 * @see SearchBuilder#setDeduplicateInDatabase(boolean)
	 */
	private Set<JpaPid> myPidSet;

	private boolean myHasNextIteratorQuery = false;
	private RequestPartitionId myRequestPartitionId;

	private SearchQueryProperties mySearchProperties;

	@Autowired(required = false)
	private IFulltextSearchSvc myFulltextSearchSvc;

	@Autowired(required = false)
	private IElasticsearchSvc myIElasticsearchSvc;

	@Autowired
	private IJpaStorageResourceParser myJpaStorageResourceParser;

	@Autowired
	private IResourceHistoryTableDao myResourceHistoryTableDao;

	@Autowired
	private IResourceHistoryTagDao myResourceHistoryTagDao;

	@Autowired
	private IRequestPartitionHelperSvc myPartitionHelperSvc;

	/**
	 * Constructor
	 */
	@SuppressWarnings({"rawtypes", "unchecked"})
	public SearchBuilder(
			String theResourceName,
			JpaStorageSettings theStorageSettings,
			HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory,
			SqlObjectFactory theSqlBuilderFactory,
			HibernatePropertiesProvider theDialectProvider,
			ISearchParamRegistry theSearchParamRegistry,
			PartitionSettings thePartitionSettings,
			IInterceptorBroadcaster theInterceptorBroadcaster,
			IResourceTagDao theResourceTagDao,
			DaoRegistry theDaoRegistry,
			FhirContext theContext,
			IIdHelperService theIdHelperService,
			Class<? extends IBaseResource> theResourceType) {
		myResourceName = theResourceName;
		myResourceType = theResourceType;
		myStorageSettings = theStorageSettings;

		myEntityManagerFactory = theEntityManagerFactory;
		mySqlBuilderFactory = theSqlBuilderFactory;
		myDialectProvider = theDialectProvider;
		mySearchParamRegistry = theSearchParamRegistry;
		myPartitionSettings = thePartitionSettings;
		myInterceptorBroadcaster = theInterceptorBroadcaster;
		myResourceTagDao = theResourceTagDao;
		myDaoRegistry = theDaoRegistry;
		myContext = theContext;
		myIdHelperService = theIdHelperService;

		mySearchProperties = new SearchQueryProperties();
	}

	@VisibleForTesting
	void setResourceName(String theName) {
		myResourceName = theName;
	}

	@Override
	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
		mySearchProperties.setMaxResultsRequested(theMaxResultsToFetch);
	}

	@Override
	public void setDeduplicateInDatabase(boolean theShouldDeduplicateInDB) {
		mySearchProperties.setDeduplicateInDatabase(theShouldDeduplicateInDB);
	}

	@Override
	public void setRequireTotal(boolean theRequireTotal) {
		myRequiresTotal = theRequireTotal;
	}

	@Override
	public boolean requiresTotal() {
		return myRequiresTotal;
	}

	private void searchForIdsWithAndOr(
			SearchQueryBuilder theSearchSqlBuilder,
			QueryStack theQueryStack,
			@Nonnull SearchParameterMap theParams,
			RequestDetails theRequest) {
		myParams = theParams;
		mySearchProperties.setSortSpec(myParams.getSort());

		// Remove any empty parameters
		theParams.clean();

		// For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance
		if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
			Dstu3DistanceHelper.setNearDistance(myResourceType, theParams);
		}

		// Attempt to lookup via composite unique key.
		if (isCompositeUniqueSpCandidate()) {
			attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest);
		}

		// Handle _id and _tag last, since they can typically be tacked onto a different parameter
		List<String> paramNames = myParams.keySet().stream()
				.filter(t -> !t.equals(IAnyResource.SP_RES_ID))
				.filter(t -> !t.equals(Constants.PARAM_TAG))
				.collect(Collectors.toList());
		if (myParams.containsKey(IAnyResource.SP_RES_ID)) {
			paramNames.add(IAnyResource.SP_RES_ID);
		}
		if (myParams.containsKey(Constants.PARAM_TAG)) {
			paramNames.add(Constants.PARAM_TAG);
		}

		// Handle each parameter
		for (String nextParamName : paramNames) {
			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by
				// Elasticsearch
				continue;
			}
			List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName);
			Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName)
					.setParamName(nextParamName)
					.setAndOrParams(andOrParams)
					.setRequest(theRequest)
					.setRequestPartitionId(myRequestPartitionId));
			if (predicate != null) {
				theSearchSqlBuilder.addPredicate(predicate);
			}
		}
	}
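
	/*
	 * Illustration (not part of the original source): the nested and/or structure handled above
	 * mirrors standard FHIR search semantics. Assuming a request like
	 * "Observation?code=1234,5678&code=abcd", myParams.get("code") holds two inner lists -
	 * [1234, 5678] and [abcd] - which the QueryStack combines as
	 * (code=1234 OR code=5678) AND (code=abcd) within the single predicate built for that
	 * parameter name.
	 */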

	/**
	 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the
	 * parameters all have no modifiers.
	 */
	private boolean isCompositeUniqueSpCandidate() {
		return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null;
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public Long createCountQuery(
			SearchParameterMap theParams,
			String theSearchUuid,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {

		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchUuid, theRequestPartitionId);

		if (checkUseHibernateSearch()) {
			return myFulltextSearchSvc.count(myResourceName, theParams.clone());
		}

		SearchQueryProperties properties = mySearchProperties.clone();
		properties.setDoCountOnlyFlag(true);
		properties.setSortSpec(null); // counts don't require sorts
		properties.setMaxResultsRequested(null);
		properties.setOffset(null);
		List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), properties, theRequest, null);
		if (queries.isEmpty()) {
			return 0L;
		} else {
			JpaPid jpaPid = queries.get(0).next();
			return jpaPid.getId();
		}
	}

	/**
	 * @param thePidSet May be null
	 */
	@Override
	public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) {
		myPidSet = new HashSet<>(thePidSet);
	}

	@SuppressWarnings("ConstantConditions")
	@Override
	public IResultIterator<JpaPid> createQuery(
			SearchParameterMap theParams,
			SearchRuntimeDetails theSearchRuntimeDetails,
			RequestDetails theRequest,
			@Nonnull RequestPartitionId theRequestPartitionId) {
		assert theRequestPartitionId != null;
		assert TransactionSynchronizationManager.isActualTransactionActive();

		init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId);

		if (myPidSet == null) {
			myPidSet = new HashSet<>();
		}

		return new QueryIterator(theSearchRuntimeDetails, theRequest);
	}

	private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
		myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
		// we mutate the params. Make a private copy.
		myParams = theParams.clone();
		mySearchProperties.setSortSpec(myParams.getSort());
		mySearchUuid = theSearchUuid;
		myRequestPartitionId = theRequestPartitionId;
	}

	/**
	 * The query created can be either a count query or the
	 * actual query.
	 * This is why it takes a SearchQueryProperties object
	 * (and doesn't use the local version of it).
	 * The properties may differ slightly for whichever
	 * query this is.
	 */
	private List<ISearchQueryExecutor> createQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			SearchRuntimeDetails theSearchRuntimeDetails) {
		ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

		if (checkUseHibernateSearch()) {
			// we're going to run at least part of the search against the Fulltext service.

			// Ugh - we have two different return types for now
			ISearchQueryExecutor fulltextExecutor = null;
			List<JpaPid> fulltextMatchIds = null;
			int resultCount = 0;
			if (myParams.isLastN()) {
				fulltextMatchIds = executeLastNAgainstIndex(theRequest, theSearchProperties.getMaxResultsRequested());
				resultCount = fulltextMatchIds.size();
			} else if (myParams.getEverythingMode() != null) {
				fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest);
				resultCount = fulltextMatchIds.size();
			} else {
				// todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't
				// enabled SP indexing).
				// and some queries don't need JPA. We only need the scroll when we need to intersect with JPA.
				// It would be faster to have a non-scrolled search in this case, since creating the scroll requires
				// extra work in Elastic.
				// if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ...

				// we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just
				// a page.
				fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest);
			}

			if (fulltextExecutor == null) {
				fulltextExecutor =
						SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>());
			}

			if (theSearchRuntimeDetails != null) {
				theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount);
				IInterceptorBroadcaster compositeBroadcaster =
						CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest);
				if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)) {
					HookParams params = new HookParams()
							.add(RequestDetails.class, theRequest)
							.addIfMatchesType(ServletRequestDetails.class, theRequest)
							.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
					compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
				}
			}

			// can we skip the database entirely and return the pid list from here?
			boolean canSkipDatabase =
					// if we processed an AND clause, and it returned nothing, then nothing can match.
					!fulltextExecutor.hasNext()
							||
							// Our hibernate search query doesn't respect partitions yet
							(!myPartitionSettings.isPartitioningEnabled()
									&&
									// were there AND terms left? Then we still need the db.
									theParams.isEmpty()
									&&
									// not every param is a param. :-(
									theParams.getNearDistanceParam() == null
									&&
									// todo MB don't we support _lastUpdated and _offset now?
									theParams.getLastUpdated() == null
									&& theParams.getEverythingMode() == null
									&& theParams.getOffset() == null);

			if (canSkipDatabase) {
				ourLog.trace("Query finished after HSearch. Skip db query phase");
				if (theSearchProperties.hasMaxResultsRequested()) {
					fulltextExecutor = SearchQueryExecutors.limited(
							fulltextExecutor, theSearchProperties.getMaxResultsRequested());
				}
				queries.add(fulltextExecutor);
			} else {
				ourLog.trace("Query needs db after HSearch. Chunking.");
				// Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc.
				// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
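				// Illustration (not part of the original source): with a full-text result of,
				// say, 2,000 pids and a page size of SearchBuilder.getMaximumPageSize() (800),
				// the chunker below would emit three partitions of at most 800 pids each, and
				// doCreateChunkedQueries turns each partition into its own SQL query whose
				// IN-list stays safely under the ~1k bind-parameter limit mentioned above.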
				new QueryChunker<JpaPid>()
						.chunk(
								fulltextExecutor,
								SearchBuilder.getMaximumPageSize(),
								// for each list of (SearchBuilder.getMaximumPageSize())
								// we create a chunked query and add it to 'queries'
								t -> doCreateChunkedQueries(theParams, t, theSearchProperties, theRequest, queries));
			}
		} else {
			// do everything in the database.
			createChunkedQuery(theParams, theSearchProperties, theRequest, null, queries);
		}

		return queries;
	}

	/**
	 * Check to see if query should use Hibernate Search, and error if the query can't continue.
	 *
	 * @return true if the query should first be processed by Hibernate Search
	 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text
	 */
	private boolean checkUseHibernateSearch() {
		boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled();

		if (!fulltextEnabled) {
			failIfUsed(Constants.PARAM_TEXT);
			failIfUsed(Constants.PARAM_CONTENT);
		} else {
			for (SortSpec sortSpec : myParams.getAllChainsInOrder()) {
				final String paramName = sortSpec.getParamName();
				if (paramName.contains(".")) {
					failIfUsedWithChainedSort(Constants.PARAM_TEXT);
					failIfUsedWithChainedSort(Constants.PARAM_CONTENT);
				}
			}
		}

		// someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we
		// can.
		return fulltextEnabled
				&& myParams != null
				&& myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE
				&& myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams)
				&& myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams);
	}

	private void failIfUsed(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(1192)
					+ "Fulltext search is not enabled on this service, can not process parameter: " + theParamName);
		}
	}

	private void failIfUsedWithChainedSort(String theParamName) {
		if (myParams.containsKey(theParamName)) {
			throw new InvalidRequestException(Msg.code(2524)
					+ "Fulltext search combined with chained sorts are not supported, can not process parameter: "
					+ theParamName);
		}
	}

	private List<JpaPid> executeLastNAgainstIndex(RequestDetails theRequestDetails, Integer theMaximumResults) {
		// Can we use our hibernate search generated index on resource to support lastN?:
		if (myStorageSettings.isAdvancedHSearchIndexing()) {
			if (myFulltextSearchSvc == null) {
				throw new InvalidRequestException(Msg.code(2027)
						+ "LastN operation is not enabled on this service, can not process this request");
			}
			return myFulltextSearchSvc.lastN(myParams, theMaximumResults).stream()
					.map(t -> (JpaPid) t)
					.collect(Collectors.toList());
		} else {
			throw new InvalidRequestException(
					Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request");
		}
	}

	private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) {
		JpaPid pid = null;
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {
			String idParamValue;
			IQueryParameterType idParam =
					myParams.get(IAnyResource.SP_RES_ID).get(0).get(0);
			if (idParam instanceof TokenParam) {
				TokenParam idParm = (TokenParam) idParam;
				idParamValue = idParm.getValue();
			} else {
				StringParam idParm = (StringParam) idParam;
				idParamValue = idParm.getValue();
			}

			pid = myIdHelperService
					.resolveResourceIdentity(
							myRequestPartitionId,
							myResourceName,
							idParamValue,
							ResolveIdentityMode.includeDeleted().cacheOk())
					.getPersistentId();
		}
		return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails);
	}

	private void doCreateChunkedQueries(
			SearchParameterMap theParams,
			List<JpaPid> thePids,
			SearchQueryProperties theSearchQueryProperties,
			RequestDetails theRequest,
			ArrayList<ISearchQueryExecutor> theQueries) {

		if (thePids.size() < getMaximumPageSize()) {
			thePids = normalizeIdListForInClause(thePids);
		}
		theSearchQueryProperties.setMaxResultsRequested(thePids.size());
		createChunkedQuery(theParams, theSearchQueryProperties, theRequest, thePids, theQueries);
	}

	/**
	 * Combs through the params for any _id parameters and extracts the PIDs for them
	 */
	private void extractTargetPidsFromIdParams(Set<JpaPid> theTargetPids) {
		// get all the IQueryParameterType objects
		// for _id -> these should all be StringParam values
		HashSet<IIdType> ids = new HashSet<>();
		List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID);
		for (List<IQueryParameterType> paramList : params) {
			for (IQueryParameterType param : paramList) {
				String id;
				if (param instanceof StringParam) {
					// we expect all _id values to be StringParams
					id = ((StringParam) param).getValue();
				} else if (param instanceof TokenParam) {
					id = ((TokenParam) param).getValue();
				} else {
					// we do not expect the _id parameter to be a non-string value
					throw new IllegalArgumentException(
							Msg.code(1193) + "_id parameter must be a StringParam or TokenParam");
				}

				IIdType idType = myContext.getVersion().newIdType();
				if (id.contains("/")) {
					idType.setValue(id);
				} else {
					idType.setValue(myResourceName + "/" + id);
				}
				ids.add(idType);
			}
		}

		// fetch our target Pids
		// this will throw if an id is not found
		Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities(
				myRequestPartitionId,
				new ArrayList<>(ids),
				ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled());

		// add the pids to targetPids
		for (IResourceLookup<JpaPid> pid : idToIdentity.values()) {
			theTargetPids.add(pid.getPersistentId());
		}
	}

	private void createChunkedQuery(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		if (myParams.getEverythingMode() != null) {
			createChunkedQueryForEverythingSearch(
					theRequest, theParams, theSearchProperties, thePidList, theSearchQueryExecutors);
		} else {
			createChunkedQueryNormalSearch(
					theParams, theSearchProperties, theRequest, thePidList, theSearchQueryExecutors);
		}
	}

	private void createChunkedQueryNormalSearch(
			SearchParameterMap theParams,
			SearchQueryProperties theSearchProperties,
			RequestDetails theRequest,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {
		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				myResourceName,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchProperties.isDoCountOnlyFlag());
		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		if (theParams.keySet().size() > 1
				|| theParams.getSort() != null
				|| theParams.keySet().contains(Constants.PARAM_HAS)
				|| isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {
			List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams(
					myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH);
			if (activeComboParams.isEmpty()) {
				sqlBuilder.setNeedResourceTableRoot(true);
			}
		}

		/*
		 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of
		 * specific filters with ORs as their root from working around the natural resource type / deletion
		 * status / partition IDs built into queries.
		 */
		if (theParams.containsKey(Constants.PARAM_FILTER)) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Normal search
		searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest);

		// If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the
		// partition ID predicate in that case.
		if (!sqlBuilder.haveAtLeastOnePredicate()) {
			Condition partitionIdPredicate = sqlBuilder
					.getOrCreateResourceTablePredicateBuilder()
					.createPartitionIdPredicate(myRequestPartitionId);
			if (partitionIdPredicate != null) {
				sqlBuilder.addPredicate(partitionIdPredicate);
			}
		}

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		// Last updated
		addLastUpdatePredicate(sqlBuilder);

		/*
		 * Exclude the pids already in the previous iterator. This is an optimization, as opposed
		 * to something needed to guarantee correct results.
		 *
		 * Why do we need it? Suppose for example, a query like:
		 *   Observation?category=foo,bar,baz
		 * And suppose you have many resources that have all 3 of these category codes. In this case
		 * the SQL query will probably return the same PIDs multiple times, and if this happens enough
		 * we may exhaust the query results without getting enough distinct results back. When that
		 * happens we re-run the query with a larger limit. Excluding results we already know about
		 * tries to ensure that we get new unique results.
		 *
		 * The challenge with that though is that lots of DBs have an issue with too many
		 * parameters in one query. So we only do this optimization if there aren't too
		 * many results.
		 */
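		// Illustration (not part of the original source): the guard below keeps the total bind
		// variables under a 900 budget. If the previous iterator left 300 pids in myPidSet and the
		// builder already holds 200 bind variables, 300 + 200 = 500 < 900 and the exclusion is
		// applied; with 800 pids and 200 bind variables it would be skipped instead of risking a
		// "too many parameters" error on databases with low limits.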
		if (myHasNextIteratorQuery) {
			if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) {
				sqlBuilder.excludeResourceIdsPredicate(myPidSet);
			}
		}

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * OR
		 * if the MaxResultsToFetch is null, we are requesting "everything",
		 * so we'll let the db do the deduplication (instead of in-memory)
		 */
		if (theSearchProperties.isDeduplicateInDatabase()) {
			queryStack3.addGrouping();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Sort
		 *
		 * If we have a sort, we wrap the criteria search (the search that actually
		 * finds the appropriate resources) in an outer search which is then sorted
		 */
		if (theSearchProperties.hasSort()) {
			assert !theSearchProperties.isDoCountOnlyFlag();

			createSort(queryStack3, theSearchProperties.getSortSpec(), theParams);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void executeSearch(
			SearchQueryProperties theProperties,
			List<ISearchQueryExecutor> theSearchQueryExecutors,
			SearchQueryBuilder sqlBuilder) {
		GeneratedSql generatedSql =
				sqlBuilder.generate(theProperties.getOffset(), theProperties.getMaxResultsRequested());
		if (!generatedSql.isMatchNothing()) {
			SearchQueryExecutor executor =
					mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, theProperties.getMaxResultsRequested());
			theSearchQueryExecutors.add(executor);
		}
	}

	private void createChunkedQueryForEverythingSearch(
			RequestDetails theRequest,
			SearchParameterMap theParams,
			SearchQueryProperties theSearchQueryProperties,
			List<JpaPid> thePidList,
			List<ISearchQueryExecutor> theSearchQueryExecutors) {

		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(
				myContext,
				myStorageSettings,
				myPartitionSettings,
				myRequestPartitionId,
				null,
				mySqlBuilderFactory,
				myDialectProvider,
				theSearchQueryProperties.isDoCountOnlyFlag());

		QueryStack queryStack3 = new QueryStack(
				theRequest,
				theParams,
				myStorageSettings,
				myContext,
				sqlBuilder,
				mySearchParamRegistry,
				myPartitionSettings);

		JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theSearchQueryProperties.getMaxResultsRequested());

		Set<JpaPid> targetPids = new HashSet<>();
		if (myParams.get(IAnyResource.SP_RES_ID) != null) {

			extractTargetPidsFromIdParams(targetPids);

			// add the target pids to our executors as the first
			// results iterator to go through
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids)));
		} else {
			// For Everything queries, we make the query root by the ResourceLink table, since this query
			// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
			// the one problem with this approach is that it doesn't catch Patients that have absolutely
			// nothing linked to them. So we do one additional query to make sure we catch those too.
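			// Illustration (not part of the original source): for a type-level Patient/$everything
			// with no _id filter, the builder below has no additional predicates, so it simply
			// selects every pid of the current resource type; that executor runs first, which is
			// how Patients with no inbound or outbound links still make it into the results.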
			SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(
					myContext,
					myStorageSettings,
					myPartitionSettings,
					myRequestPartitionId,
					myResourceName,
					mySqlBuilderFactory,
					myDialectProvider,
					theSearchQueryProperties.isDoCountOnlyFlag());
			GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(
					theSearchQueryProperties.getOffset(), mySearchProperties.getMaxResultsRequested());
			String sql = allTargetsSql.getSql();
			Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

			List<JpaPid> output =
					jdbcTemplate.query(sql, args, new JpaPidRowMapper(myPartitionSettings.isPartitioningEnabled()));

			// we add a search executor to fetch unlinked patients first
			theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output));
		}

		List<String> typeSourceResources = new ArrayList<>();
		if (myParams.get(Constants.PARAM_TYPE) != null) {
			typeSourceResources.addAll(extractTypeSourceResourcesFromParams());
		}

		queryStack3.addPredicateEverythingOperation(
				myResourceName, typeSourceResources, targetPids.toArray(EMPTY_JPA_PID_ARRAY));

		// Add PID list predicate for full text search and/or lastn operation
		addPidListPredicate(thePidList, sqlBuilder);

		/*
		 * If offset is present, we want to deduplicate the results by using GROUP BY;
		 * ORDER BY is required to make sure we return unique results for each page
		 */
		if (theSearchQueryProperties.hasOffset()) {
			queryStack3.addGrouping();
			queryStack3.addOrdering();
			queryStack3.setUseAggregate(true);
		}

		/*
		 * Now perform the search
		 */
		executeSearch(theSearchQueryProperties, theSearchQueryExecutors, sqlBuilder);
	}

	private void addPidListPredicate(List<JpaPid> thePidList, SearchQueryBuilder theSqlBuilder) {
		if (thePidList != null && !thePidList.isEmpty()) {
			theSqlBuilder.addResourceIdsPredicate(thePidList);
		}
	}

	private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) {
		DateRangeParam lu = myParams.getLastUpdated();
		if (lu != null && !lu.isEmpty()) {
			Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu);
			theSqlBuilder.addPredicate(lastUpdatedPredicates);
		}
	}

	private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) {
		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
		jdbcTemplate.setFetchSize(myFetchSize);
		if (theMaximumResults != null) {
			jdbcTemplate.setMaxRows(theMaximumResults);
		}
		return jdbcTemplate;
	}

	private Collection<String> extractTypeSourceResourcesFromParams() {

		List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE);

		// first off, let's flatten the list of list
		List<IQueryParameterType> iQueryParameterTypesList =
				listOfList.stream().flatMap(List::stream).collect(Collectors.toList());

		// then, extract all elements of each CSV into one big list
		List<String> resourceTypes = iQueryParameterTypesList.stream()
				.map(param -> ((StringParam) param).getValue())
				.map(csvString -> List.of(csvString.split(",")))
				.flatMap(List::stream)
				.collect(Collectors.toList());
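
		// Illustration (not part of the original source): a request such as
		// "Patient/$everything?_type=Observation, Encounter" arrives as a StringParam whose CSV
		// is split above into ["Observation", " Encounter"]; the loop below trims the stray
		// whitespace, rejects anything that is not a known resource type (Msg.code 2197), and
		// de-duplicates via the returned Set.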
		Set<String> knownResourceTypes = myContext.getResourceTypes();

		// remove leading/trailing whitespaces if any and remove duplicates
		Set<String> retVal = new HashSet<>();

		for (String type : resourceTypes) {
			String trimmed = type.trim();
			if (!knownResourceTypes.contains(trimmed)) {
				throw new ResourceNotFoundException(
						Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter.");
			}
			retVal.add(trimmed);
		}

		return retVal;
	}

	private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) {
		return myStorageSettings.isIndexOnContainedResources()
				&& theParams.values().stream()
						.flatMap(Collection::stream)
						.flatMap(Collection::stream)
						.anyMatch(ReferenceParam.class::isInstance);
	}

	private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) {
		if (theSort == null || isBlank(theSort.getParamName())) {
			return;
		}

		boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC);

		if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourceId(ascending);

		} else if (Constants.PARAM_PID.equals(theSort.getParamName())) {

			theQueryStack.addSortOnResourcePID(ascending);

		} else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {

			theQueryStack.addSortOnLastUpdated(ascending);

		} else {
			RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam(
					myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT);

			/*
			 * If we have a sort like _sort=subject.name and we have an
			 * uplifted refchain for that combination we can do it more efficiently
			 * by using the index associated with the uplifted refchain. In this case,
			 * we need to find the actual target search parameter (corresponding
			 * to "name" in this example) so that we know what datatype it is.
			 */
			String paramName = theSort.getParamName();
			if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) {
				String[] chains = StringUtils.split(paramName, '.');
				if (chains.length == 2) {

					// Given: Encounter?_sort=Patient:subject.name
					String referenceParam = chains[0]; // subject
					String referenceParamTargetType = null; // Patient
					String targetParam = chains[1]; // name

					int colonIdx = referenceParam.indexOf(':');
					if (colonIdx > -1) {
						referenceParamTargetType = referenceParam.substring(0, colonIdx);
						referenceParam = referenceParam.substring(colonIdx + 1);
					}
					RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam(
							myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
					if (outerParam == null) {
						throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam);
					} else if (outerParam.hasUpliftRefchain(targetParam)) {
						for (String nextTargetType : outerParam.getTargets()) {
							if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) {
								continue;
							}
							RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam(
									nextTargetType,
									targetParam,
									ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
							if (innerParam != null) {
								param = innerParam;
								break;
							}
						}
					}
				}
			}

			int colonIdx = paramName.indexOf(':');
			String referenceTargetType = null;
			if (colonIdx > -1) {
				referenceTargetType = paramName.substring(0, colonIdx);
				paramName = paramName.substring(colonIdx + 1);
			}

			int dotIdx = paramName.indexOf('.');
			String chainName = null;
			if (param == null && dotIdx > -1) {
				chainName = paramName.substring(dotIdx + 1);
				paramName = paramName.substring(0, dotIdx);
				if (chainName.contains(".")) {
					String msg = myContext
							.getLocalizer()
							.getMessageSanitized(
									BaseStorageDao.class,
									"invalidSortParameterTooManyChains",
									paramName + "." + chainName);
					throw new InvalidRequestException(Msg.code(2286) + msg);
				}
			}

			if (param == null) {
				param = mySearchParamRegistry.getActiveSearchParam(
						myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
			}

			if (param == null) {
				throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName);
			}

			// param will never be null here (the above line throws if it does)
			// this is just to prevent the warning
			assert param != null;
			if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
				throw new InvalidRequestException(
						Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter");
			}

			switch (param.getParamType()) {
				case STRING:
					theQueryStack.addSortOnString(myResourceName, paramName, ascending);
					break;
				case DATE:
					theQueryStack.addSortOnDate(myResourceName, paramName, ascending);
					break;
				case REFERENCE:
					theQueryStack.addSortOnResourceLink(
							myResourceName, referenceTargetType, paramName, chainName, ascending, theParams);
					break;
				case TOKEN:
					theQueryStack.addSortOnToken(myResourceName, paramName, ascending);
					break;
				case NUMBER:
					theQueryStack.addSortOnNumber(myResourceName, paramName, ascending);
					break;
				case URI:
					theQueryStack.addSortOnUri(myResourceName, paramName, ascending);
					break;
				case QUANTITY:
					theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending);
					break;
				case COMPOSITE:
					List<RuntimeSearchParam> compositeList =
							JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param);
					if (compositeList == null) {
						throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName
								+ " is not defined by the resource " + myResourceName);
					}
					if (compositeList.size() != 2) {
						throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName
								+ " must have 2 composite types declared in parameter annotation, found "
								+ compositeList.size());
					}
					RuntimeSearchParam left = compositeList.get(0);
					RuntimeSearchParam right = compositeList.get(1);

					createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending);
					createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending);

					break;
				case SPECIAL:
					if (LOCATION_POSITION.equals(param.getPath())) {
						theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams);
						break;
					}
					throw new InvalidRequestException(
							Msg.code(2306) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);

				case HAS:
				default:
					throw new InvalidRequestException(
							Msg.code(1197) + "This server does not support _sort specifications of type "
									+ param.getParamType() + " - Can't serve _sort=" + paramName);
			}
		}

		// Recurse
		createSort(theQueryStack, theSort.getChain(), theParams);
	}
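
	/*
	 * Illustration (not part of the original source): a request like
	 * "Observation?_sort=status,-date" is parsed by HAPI into a chained SortSpec
	 * (status, then date descending), so createSort above handles "status" on the first
	 * call and then recurses via theSort.getChain() to add the "date" sort, preserving
	 * the order in which the sort keys were supplied.
	 */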
	private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) {
		Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta(
				theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT);
		String msg = myContext
				.getLocalizer()
				.getMessageSanitized(
						BaseStorageDao.class,
						"invalidSortParameter",
						theParamName,
						theResourceName,
						validSearchParameterNames);
		throw new InvalidRequestException(Msg.code(1194) + msg);
	}

	private void createCompositeSort(
			QueryStack theQueryStack,
			RestSearchParameterTypeEnum theParamType,
			String theParamName,
			boolean theAscending) {

		switch (theParamType) {
			case STRING:
				theQueryStack.addSortOnString(myResourceName, theParamName, theAscending);
				break;
			case DATE:
				theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending);
				break;
			case TOKEN:
				theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending);
				break;
			case QUANTITY:
				theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending);
				break;
			case NUMBER:
			case REFERENCE:
			case COMPOSITE:
			case URI:
			case HAS:
			case SPECIAL:
			default:
				throw new InvalidRequestException(
						Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType
								+ " on _sort=" + theParamName);
		}
	}

	private void doLoadPids(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			Map<Long, Integer> thePosition) {

		Map<JpaPid, Long> resourcePidToVersion = null;
		for (JpaPid next : thePids) {
			if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) {
				if (resourcePidToVersion == null) {
					resourcePidToVersion = new HashMap<>();
				}
				resourcePidToVersion.put(next, next.getVersion());
			}
		}

		List<JpaPid> versionlessPids = new ArrayList<>(thePids);
		if (versionlessPids.size() < getMaximumPageSize()) {
			versionlessPids = normalizeIdListForInClause(versionlessPids);
		}

		// Load the resource bodies
		List<ResourceHistoryTable> resourceSearchViewList =
				myResourceHistoryTableDao.findCurrentVersionsByResourcePidsAndFetchResourceTable(
						JpaPidFk.fromPids(versionlessPids));

		/*
		 * If we have specific versions to load, replace the history entries with the
		 * correct ones
		 *
		 * TODO: this could definitely be made more efficient, probably by not loading the wrong
		 * version entity first, and by batching the fetches. But this is a fairly infrequently
		 * used feature, and loading history entities by PK is a very efficient query so it's
		 * not the end of the world
		 */
		if (resourcePidToVersion != null) {
			for (int i = 0; i < resourceSearchViewList.size(); i++) {
				ResourceHistoryTable next = resourceSearchViewList.get(i);
				JpaPid resourceId = next.getPersistentId();
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
				if (version != null && !version.equals(next.getVersion())) {
					ResourceHistoryTable replacement = myResourceHistoryTableDao.findForIdAndVersion(
							next.getResourceId().toFk(), version);
					resourceSearchViewList.set(i, replacement);
				}
			}
		}

		// -- preload all tags with tag definition if any
		Map<JpaPid, Collection<BaseTag>> tagMap = getResourceTagMap(resourceSearchViewList);

		for (ResourceHistoryTable next : resourceSearchViewList) {
			if (next.getDeleted() != null) {
				continue;
			}
			Class<? extends IBaseResource> resourceType =
					myContext.getResourceDefinition(next.getResourceType()).getImplementingClass();

			JpaPid resourceId = next.getPersistentId();

			if (resourcePidToVersion != null) {
				Long version = resourcePidToVersion.get(resourceId);
				resourceId.setVersion(version);
			}

			IBaseResource resource = null;
			if (next != null) {
				resource = myJpaStorageResourceParser.toResource(
						resourceType, next, tagMap.get(next.getResourceId()), theForHistoryOperation);
			}
			if (resource == null) {
				if (next != null) {
					ourLog.warn(
							"Unable to find resource {}/{}/_history/{} in database",
							next.getResourceType(),
							next.getIdDt().getIdPart(),
							next.getVersion());
				} else {
					ourLog.warn("Unable to find resource in database.");
				}
				continue;
			}

			Integer index = thePosition.get(resourceId.getId());
			if (index == null) {
				ourLog.warn("Got back unexpected resource PID {}", resourceId);
				continue;
			}

			if (theIncludedPids.contains(resourceId)) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH);
			}

			theResourceListToPopulate.set(index, resource);
		}
	}

	private Map<JpaPid, Collection<BaseTag>> getResourceTagMap(Collection<ResourceHistoryTable> theHistoryTables) {

		switch (myStorageSettings.getTagStorageMode()) {
			case VERSIONED:
				return getPidToTagMapVersioned(theHistoryTables);
			case NON_VERSIONED:
				return getPidToTagMapUnversioned(theHistoryTables);
			case INLINE:
			default:
				return Map.of();
		}
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapVersioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<ResourceHistoryTablePk> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceHistoryTag> tagList = myResourceHistoryTagDao.findByVersionIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceHistoryTag tag : tagList) {

			resourceId = tag.getResourcePid();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Nonnull
	private Map<JpaPid, Collection<BaseTag>> getPidToTagMapUnversioned(
			Collection<ResourceHistoryTable> theHistoryTables) {
		List<JpaPid> idList = new ArrayList<>(theHistoryTables.size());

		// -- find all resources that have tags
		for (ResourceHistoryTable resource : theHistoryTables) {
			if (resource.isHasTags()) {
				idList.add(resource.getResourceId());
			}
		}

		Map<JpaPid, Collection<BaseTag>> tagMap = new HashMap<>();

		// -- no tags
		if (idList.isEmpty()) {
			return tagMap;
		}

		// -- get all tags for the idList
		Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(idList);

		// -- build the map, key = resourceId, value = list of ResourceTag
		JpaPid resourceId;
		Collection<BaseTag> tagCol;
		for (ResourceTag tag : tagList) {

			resourceId = tag.getResourceId();
			tagCol = tagMap.get(resourceId);
			if (tagCol == null) {
				tagCol = new ArrayList<>();
				tagCol.add(tag);
				tagMap.put(resourceId, tagCol);
			} else {
				tagCol.add(tag);
			}
		}

		return tagMap;
	}

	@Override
	public void loadResourcesByPid(
			Collection<JpaPid> thePids,
			Collection<JpaPid> theIncludedPids,
			List<IBaseResource> theResourceListToPopulate,
			boolean theForHistoryOperation,
			RequestDetails theDetails) {
		if (thePids.isEmpty()) {
			ourLog.debug("The include pids are empty");
		}

		// Dupes will cause a crash later anyhow, but this is expensive so only do it
		// when running asserts
		assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids;

		Map<Long, Integer> position = new HashMap<>();
		for (JpaPid next : thePids) {
			position.put(next.getId(), theResourceListToPopulate.size());
			theResourceListToPopulate.add(null);
		}

		// Can we fast track this loading by checking elastic search?
		if (isLoadingFromElasticSearchSupported(thePids)) {
			try {
				theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids));
				return;

			} catch (ResourceNotFoundInIndexException theE) {
				// some resources were not found in the index, so we log a warning and fall back to a JPA search
				ourLog.warn(
						"Some resources were not found in index. Make sure all resources were indexed. Resorting to database search.");
			}
		}

		// We only chunk because some jdbc drivers can't handle long param lists.
		QueryChunker.chunk(
				thePids,
				t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position));
	}

	/**
	 * Check if we can load the resources from Hibernate Search instead of the database.
	 * We assume this is faster.
	 * <p>
	 * Hibernate Search only stores the current version, and only if enabled.
	 *
	 * @param thePids the pids to check for versioned references
	 * @return can we fetch from Hibernate Search?
	 */
	private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) {
		// is storage enabled?
		return myStorageSettings.isStoreResourceInHSearchIndex()
				&& myStorageSettings.isAdvancedHSearchIndexing()
				&&
				// we don't support history
				thePids.stream().noneMatch(p -> p.getVersion() != null)
				&&
				// skip the complexity for metadata in dstu2
				myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3);
	}

	private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) {
		// Do we use the fulltext svc via hibernate-search to load resources, or stay backwards
		// compatible with the older ES-only implementation to handle lastN?
1462 if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) { 1463 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1464 1465 return myFulltextSearchSvc.getResources(pidList); 1466 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1467 // legacy LastN implementation 1468 return myIElasticsearchSvc.getObservationResources(thePids); 1469 } else { 1470 return Collections.emptyList(); 1471 } 1472 } 1473 1474 /** 1475 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1476 * so it can't be Collections.emptySet() or some such thing. 1477 * The JpaPid returned will have resource type populated. 1478 */ 1479 @Override 1480 public Set<JpaPid> loadIncludes( 1481 FhirContext theContext, 1482 EntityManager theEntityManager, 1483 Collection<JpaPid> theMatches, 1484 Collection<Include> theIncludes, 1485 boolean theReverseMode, 1486 DateRangeParam theLastUpdated, 1487 String theSearchIdOrDescription, 1488 RequestDetails theRequest, 1489 Integer theMaxCount) { 1490 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1491 parameters.setFhirContext(theContext); 1492 parameters.setEntityManager(theEntityManager); 1493 parameters.setMatches(theMatches); 1494 parameters.setIncludeFilters(theIncludes); 1495 parameters.setReverseMode(theReverseMode); 1496 parameters.setLastUpdated(theLastUpdated); 1497 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1498 parameters.setRequestDetails(theRequest); 1499 parameters.setMaxCount(theMaxCount); 1500 return loadIncludes(parameters); 1501 } 1502 1503 @Override 1504 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1505 Collection<JpaPid> matches = theParameters.getMatches(); 1506 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1507 boolean reverseMode = theParameters.isReverseMode(); 1508 EntityManager entityManager = theParameters.getEntityManager(); 1509 Integer maxCount = theParameters.getMaxCount(); 1510 FhirContext fhirContext = theParameters.getFhirContext(); 1511 RequestDetails request = theParameters.getRequestDetails(); 1512 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1513 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1514 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1515 IInterceptorBroadcaster compositeBroadcaster = 1516 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, request); 1517 1518 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1519 CurrentThreadCaptureQueriesListener.startCapturing(); 1520 } 1521 if (matches.isEmpty()) { 1522 return new HashSet<>(); 1523 } 1524 if (currentIncludes == null || currentIncludes.isEmpty()) { 1525 return new HashSet<>(); 1526 } 1527 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1528 String searchPartitionIdFieldName = 1529 reverseMode ? MY_TARGET_RESOURCE_PARTITION_ID : MY_SOURCE_RESOURCE_PARTITION_ID; 1530 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1531 String findPartitionIdFieldName = 1532 reverseMode ? MY_SOURCE_RESOURCE_PARTITION_ID : MY_TARGET_RESOURCE_PARTITION_ID; 1533 String findResourceTypeFieldName = reverseMode ? 
MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1534 String findVersionFieldName = null; 1535 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1536 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1537 } 1538 1539 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1540 HashSet<JpaPid> allAdded = new HashSet<>(); 1541 HashSet<JpaPid> original = new HashSet<>(matches); 1542 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1543 1544 int roundCounts = 0; 1545 StopWatch w = new StopWatch(); 1546 1547 boolean addedSomeThisRound; 1548 do { 1549 roundCounts++; 1550 1551 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1552 1553 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1554 Include nextInclude = iter.next(); 1555 if (!nextInclude.isRecurse()) { 1556 iter.remove(); 1557 } 1558 1559 // Account for _include=* 1560 boolean matchAll = "*".equals(nextInclude.getValue()); 1561 1562 // Account for _include=[resourceType]:* 1563 String wantResourceType = null; 1564 if (!matchAll) { 1565 if ("*".equals(nextInclude.getParamName())) { 1566 wantResourceType = nextInclude.getParamType(); 1567 matchAll = true; 1568 } 1569 } 1570 1571 if (matchAll) { 1572 loadIncludesMatchAll( 1573 findPidFieldName, 1574 findPartitionIdFieldName, 1575 findResourceTypeFieldName, 1576 findVersionFieldName, 1577 searchPidFieldName, 1578 searchPartitionIdFieldName, 1579 wantResourceType, 1580 reverseMode, 1581 hasDesiredResourceTypes, 1582 nextRoundMatches, 1583 entityManager, 1584 maxCount, 1585 desiredResourceTypes, 1586 pidsToInclude, 1587 request); 1588 } else { 1589 loadIncludesMatchSpecific( 1590 nextInclude, 1591 fhirContext, 1592 findPidFieldName, 1593 findPartitionIdFieldName, 1594 findVersionFieldName, 1595 searchPidFieldName, 1596 searchPartitionIdFieldName, 1597 reverseMode, 1598 nextRoundMatches, 1599 entityManager, 1600 maxCount, 1601 pidsToInclude, 1602 request); 1603 } 1604 } 1605 1606 nextRoundMatches.clear(); 1607 for (JpaPid next : pidsToInclude) { 1608 if (!original.contains(next) && !allAdded.contains(next)) { 1609 nextRoundMatches.add(next); 1610 } 1611 } 1612 1613 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1614 1615 if (maxCount != null && allAdded.size() >= maxCount) { 1616 break; 1617 } 1618 1619 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1620 1621 allAdded.removeAll(original); 1622 1623 ourLog.info( 1624 "Loaded {} {} in {} rounds and {} ms for search {}", 1625 allAdded.size(), 1626 reverseMode ? 
"_revincludes" : "_includes", 1627 roundCounts, 1628 w.getMillisAndRestart(), 1629 searchIdOrDescription); 1630 1631 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL)) { 1632 callRawSqlHookWithCurrentThreadQueries(request, compositeBroadcaster); 1633 } 1634 1635 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1636 // This can be used to remove results from the search result details before 1637 // the user has a chance to know that they were in the results 1638 if (!allAdded.isEmpty()) { 1639 1640 if (compositeBroadcaster.hasHooks(Pointcut.STORAGE_PREACCESS_RESOURCES)) { 1641 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1642 JpaPreResourceAccessDetails accessDetails = 1643 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1644 HookParams params = new HookParams() 1645 .add(IPreResourceAccessDetails.class, accessDetails) 1646 .add(RequestDetails.class, request) 1647 .addIfMatchesType(ServletRequestDetails.class, request); 1648 compositeBroadcaster.callHooks(Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1649 1650 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1651 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1652 JpaPid value = includedPidList.remove(i); 1653 if (value != null) { 1654 allAdded.remove(value); 1655 } 1656 } 1657 } 1658 } 1659 } 1660 1661 return allAdded; 1662 } 1663 1664 private void loadIncludesMatchSpecific( 1665 Include nextInclude, 1666 FhirContext fhirContext, 1667 String findPidFieldName, 1668 String findPartitionFieldName, 1669 String findVersionFieldName, 1670 String searchPidFieldName, 1671 String searchPartitionFieldName, 1672 boolean reverseMode, 1673 List<JpaPid> nextRoundMatches, 1674 EntityManager entityManager, 1675 Integer maxCount, 1676 HashSet<JpaPid> pidsToInclude, 1677 RequestDetails theRequest) { 1678 List<String> paths; 1679 1680 // Start replace 1681 RuntimeSearchParam param; 1682 String resType = nextInclude.getParamType(); 1683 if (isBlank(resType)) { 1684 return; 1685 } 1686 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1687 if (def == null) { 1688 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1689 return; 1690 } 1691 1692 String paramName = nextInclude.getParamName(); 1693 if (isNotBlank(paramName)) { 1694 param = mySearchParamRegistry.getActiveSearchParam( 1695 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1696 } else { 1697 param = null; 1698 } 1699 if (param == null) { 1700 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1701 return; 1702 } 1703 1704 paths = param.getPathsSplitForResourceType(resType); 1705 // end replace 1706 1707 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1708 1709 for (String nextPath : paths) { 1710 String findPidFieldSqlColumn = 1711 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1712 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1713 if (findVersionFieldName != null) { 1714 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1715 } 1716 if (myPartitionSettings.isDatabasePartitionMode()) { 1717 fieldsToLoad += ", r."; 1718 fieldsToLoad += findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1719 ? 
"partition_id" 1720 : "target_res_partition_id"; 1721 fieldsToLoad += " as " + PARTITION_ID_ALIAS; 1722 } 1723 1724 // Query for includes lookup has 2 cases 1725 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1726 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1727 // url in target_resource_url 1728 1729 // Case 1: 1730 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1731 1732 String searchPidFieldSqlColumn = 1733 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1734 StringBuilder localReferenceQuery = new StringBuilder(); 1735 localReferenceQuery.append("SELECT ").append(fieldsToLoad); 1736 localReferenceQuery.append(" FROM hfj_res_link r "); 1737 localReferenceQuery.append("WHERE r.src_path = :src_path"); 1738 if (!"target_resource_id".equals(searchPidFieldSqlColumn)) { 1739 localReferenceQuery.append(" AND r.target_resource_id IS NOT NULL"); 1740 } 1741 localReferenceQuery 1742 .append(" AND r.") 1743 .append(searchPidFieldSqlColumn) 1744 .append(" IN (:target_pids) "); 1745 if (myPartitionSettings.isDatabasePartitionMode()) { 1746 String partitionFieldToSearch = findPartitionFieldName.equals(MY_SOURCE_RESOURCE_PARTITION_ID) 1747 ? "target_res_partition_id" 1748 : "partition_id"; 1749 localReferenceQuery 1750 .append("AND r.") 1751 .append(partitionFieldToSearch) 1752 .append(" = :search_partition_id "); 1753 } 1754 localReferenceQueryParams.put("src_path", nextPath); 1755 // we loop over target_pids later. 1756 if (targetResourceTypes != null) { 1757 if (targetResourceTypes.size() == 1) { 1758 localReferenceQuery.append("AND r.target_resource_type = :target_resource_type "); 1759 localReferenceQueryParams.put( 1760 "target_resource_type", 1761 targetResourceTypes.iterator().next()); 1762 } else { 1763 localReferenceQuery.append("AND r.target_resource_type in (:target_resource_types) "); 1764 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1765 } 1766 } 1767 1768 // Case 2: 1769 Pair<String, Map<String, Object>> canonicalQuery = 1770 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode, theRequest); 1771 1772 String sql = localReferenceQuery + "UNION " + canonicalQuery.getLeft(); 1773 1774 Map<String, Object> limitParams = new HashMap<>(); 1775 if (maxCount != null) { 1776 LinkedList<Object> bindVariables = new LinkedList<>(); 1777 sql = SearchQueryBuilder.applyLimitToSql( 1778 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1779 1780 // The dialect SQL limiter uses positional params, but we're using 1781 // named params here, so we need to replace the positional params 1782 // with equivalent named ones 1783 StringBuilder sb = new StringBuilder(); 1784 for (int i = 0; i < sql.length(); i++) { 1785 char nextChar = sql.charAt(i); 1786 if (nextChar == '?') { 1787 String nextName = "limit" + i; 1788 sb.append(':').append(nextName); 1789 limitParams.put(nextName, bindVariables.removeFirst()); 1790 } else { 1791 sb.append(nextChar); 1792 } 1793 } 1794 sql = sb.toString(); 1795 } 1796 1797 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1798 for (Collection<JpaPid> nextPartition : partitions) { 1799 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1800 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1801 if (myPartitionSettings.isDatabasePartitionMode()) { 1802 
q.setParameter( 1803 "search_partition_id", 1804 nextPartition.iterator().next().getPartitionId()); 1805 } 1806 localReferenceQueryParams.forEach(q::setParameter); 1807 canonicalQuery.getRight().forEach(q::setParameter); 1808 limitParams.forEach(q::setParameter); 1809 1810 @SuppressWarnings("unchecked") 1811 List<Tuple> results = q.getResultList(); 1812 for (Tuple result : results) { 1813 if (result != null) { 1814 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1815 Long resourceVersion = null; 1816 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1817 resourceVersion = 1818 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1819 } 1820 Integer partitionId = null; 1821 if (myPartitionSettings.isDatabasePartitionMode()) { 1822 partitionId = result.get(PARTITION_ID_ALIAS, Integer.class); 1823 } 1824 1825 JpaPid pid = JpaPid.fromIdAndVersion(resourceId, resourceVersion); 1826 pid.setPartitionId(partitionId); 1827 pidsToInclude.add(pid); 1828 } 1829 } 1830 } 1831 } 1832 } 1833 1834 private void loadIncludesMatchAll( 1835 String findPidFieldName, 1836 String findPartitionFieldName, 1837 String findResourceTypeFieldName, 1838 String findVersionFieldName, 1839 String searchPidFieldName, 1840 String searchPartitionFieldName, 1841 String wantResourceType, 1842 boolean reverseMode, 1843 boolean hasDesiredResourceTypes, 1844 List<JpaPid> nextRoundMatches, 1845 EntityManager entityManager, 1846 Integer maxCount, 1847 List<String> desiredResourceTypes, 1848 HashSet<JpaPid> pidsToInclude, 1849 RequestDetails request) { 1850 StringBuilder sqlBuilder = new StringBuilder(); 1851 sqlBuilder.append("SELECT r.").append(findPidFieldName); 1852 sqlBuilder.append(", r.").append(findResourceTypeFieldName); 1853 sqlBuilder.append(", r.myTargetResourceUrl"); 1854 if (findVersionFieldName != null) { 1855 sqlBuilder.append(", r.").append(findVersionFieldName); 1856 } 1857 if (myPartitionSettings.isDatabasePartitionMode()) { 1858 sqlBuilder.append(", r.").append(findPartitionFieldName); 1859 } 1860 sqlBuilder.append(" FROM ResourceLink r WHERE "); 1861 1862 if (myPartitionSettings.isDatabasePartitionMode()) { 1863 sqlBuilder.append("r.").append(searchPartitionFieldName); 1864 sqlBuilder.append(" = :target_partition_id AND "); 1865 } 1866 1867 sqlBuilder.append("r.").append(searchPidFieldName); 1868 sqlBuilder.append(" IN (:target_pids)"); 1869 1870 /* 1871 * We need to set the resource type in 2 cases only: 1872 * 1) we are in $everything mode 1873 * (where we only want to fetch specific resource types, regardless of what is 1874 * available to fetch) 1875 * 2) we are doing revincludes 1876 * 1877 * Technically if the request is a qualified star (e.g. _include=Observation:*) we 1878 * should always be checking the source resource type on the resource link. We don't 1879 * actually index that column though by default, so in order to try and be efficient 1880 * we don't actually include it for includes (but we do for revincludes). This is 1881 * because for an include, it doesn't really make sense to include a different 1882 * resource type than the one you are searching on. 1883 */ 1884 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1885 // because mySourceResourceType is not part of the HFJ_RES_LINK 1886 // index, this might not be the most optimal performance. 
1887 // but it is for an $everything operation (and maybe we should update the index) 1888 sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type"); 1889 } else { 1890 wantResourceType = null; 1891 } 1892 1893 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1894 // resources 1895 // (e.g. via Provenance, List, or Group) when in an $everything operation 1896 if (myParams != null 1897 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1898 sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'"); 1899 sqlBuilder.append(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE.stream() 1900 .collect(Collectors.joining("', '", " AND r.mySourceResourceType NOT IN ('", "')"))); 1901 } 1902 if (hasDesiredResourceTypes) { 1903 sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)"); 1904 } 1905 1906 String sql = sqlBuilder.toString(); 1907 List<Collection<JpaPid>> partitions = partitionBySizeAndPartitionId(nextRoundMatches, getMaximumPageSize()); 1908 for (Collection<JpaPid> nextPartition : partitions) { 1909 TypedQuery<?> q = entityManager.createQuery(sql, Object[].class); 1910 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1911 if (myPartitionSettings.isDatabasePartitionMode()) { 1912 q.setParameter( 1913 "target_partition_id", nextPartition.iterator().next().getPartitionId()); 1914 } 1915 if (wantResourceType != null) { 1916 q.setParameter("want_resource_type", wantResourceType); 1917 } 1918 if (maxCount != null) { 1919 q.setMaxResults(maxCount); 1920 } 1921 if (hasDesiredResourceTypes) { 1922 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1923 } 1924 List<?> results = q.getResultList(); 1925 Set<String> canonicalUrls = null; 1926 for (Object nextRow : results) { 1927 if (nextRow == null) { 1928 // This can happen if there are outgoing references which are canonical or point to 1929 // other servers 1930 continue; 1931 } 1932 1933 Long version = null; 1934 Long resourceId = (Long) ((Object[]) nextRow)[0]; 1935 String resourceType = (String) ((Object[]) nextRow)[1]; 1936 String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; 1937 Integer partitionId = null; 1938 int offset = 0; 1939 if (findVersionFieldName != null) { 1940 version = (Long) ((Object[]) nextRow)[3]; 1941 offset++; 1942 } 1943 if (myPartitionSettings.isDatabasePartitionMode()) { 1944 partitionId = ((Integer) ((Object[]) nextRow)[3 + offset]); 1945 } 1946 1947 if (resourceId != null) { 1948 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1949 pid.setPartitionId(partitionId); 1950 pidsToInclude.add(pid); 1951 } else if (resourceCanonicalUrl != null) { 1952 if (canonicalUrls == null) { 1953 canonicalUrls = new HashSet<>(); 1954 } 1955 canonicalUrls.add(resourceCanonicalUrl); 1956 } 1957 } 1958 1959 if (canonicalUrls != null) { 1960 String message = 1961 "Search with _include=* can be inefficient when references using canonical URLs are detected. 
Use more specific _include values instead."; 1962 firePerformanceWarning(request, message); 1963 loadCanonicalUrls(request, canonicalUrls, entityManager, pidsToInclude, reverseMode); 1964 } 1965 } 1966 } 1967 1968 private void loadCanonicalUrls( 1969 RequestDetails theRequestDetails, 1970 Set<String> theCanonicalUrls, 1971 EntityManager theEntityManager, 1972 HashSet<JpaPid> thePidsToInclude, 1973 boolean theReverse) { 1974 StringBuilder sqlBuilder; 1975 CanonicalUrlTargets canonicalUrlTargets = 1976 calculateIndexUriIdentityHashesForResourceTypes(theRequestDetails, null, theReverse); 1977 List<List<String>> canonicalUrlPartitions = ListUtils.partition( 1978 List.copyOf(theCanonicalUrls), getMaximumPageSize() - canonicalUrlTargets.myHashIdentityValues.size()); 1979 1980 sqlBuilder = new StringBuilder(); 1981 sqlBuilder.append("SELECT "); 1982 if (myPartitionSettings.isPartitioningEnabled()) { 1983 sqlBuilder.append("i.myPartitionIdValue, "); 1984 } 1985 sqlBuilder.append("i.myResourcePid "); 1986 1987 sqlBuilder.append("FROM ResourceIndexedSearchParamUri i "); 1988 sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) "); 1989 sqlBuilder.append("AND i.myUri IN (:uris)"); 1990 1991 String canonicalResSql = sqlBuilder.toString(); 1992 1993 for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) { 1994 TypedQuery<Object[]> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Object[].class); 1995 canonicalResIdQuery.setParameter("hash_identity", canonicalUrlTargets.myHashIdentityValues); 1996 canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList); 1997 List<Object[]> results = canonicalResIdQuery.getResultList(); 1998 for (var next : results) { 1999 if (next != null) { 2000 Integer partitionId = null; 2001 Long pid; 2002 if (next.length == 1) { 2003 pid = (Long) next[0]; 2004 } else { 2005 partitionId = (Integer) ((Object[]) next)[0]; 2006 pid = (Long) ((Object[]) next)[1]; 2007 } 2008 if (pid != null) { 2009 thePidsToInclude.add(JpaPid.fromId(pid, partitionId)); 2010 } 2011 } 2012 } 2013 } 2014 } 2015 2016 /** 2017 * Calls the performance trace hook, forwarding the raw SQL queries captured on the 2018 * current thread to the {@link Pointcut#JPA_PERFTRACE_RAW_SQL} pointcut. 2019 * 2020 * @param request the request details 2021 */ 2022 private void callRawSqlHookWithCurrentThreadQueries( 2023 RequestDetails request, IInterceptorBroadcaster theCompositeBroadcaster) { 2024 SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing(); 2025 HookParams params = new HookParams() 2026 .add(RequestDetails.class, request) 2027 .addIfMatchesType(ServletRequestDetails.class, request) 2028 .add(SqlQueryList.class, capturedQueries); 2029 theCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_RAW_SQL, params); 2030 } 2031 2032 @Nullable 2033 private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) { 2034 String targetResourceType = defaultString(nextInclude.getParamTargetType(), null); 2035 boolean haveTargetTypesDefinedByParam = param.hasTargets(); 2036 Set<String> targetResourceTypes; 2037 if (targetResourceType != null) { 2038 targetResourceTypes = Set.of(targetResourceType); 2039 } else if (haveTargetTypesDefinedByParam) { 2040 targetResourceTypes = param.getTargets(); 2041 } else { 2042 // all types!
2043 targetResourceTypes = null; 2044 } 2045 return targetResourceTypes; 2046 } 2047 2048 @Nonnull 2049 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 2050 String theVersionFieldName, 2051 Set<String> theTargetResourceTypes, 2052 boolean theReverse, 2053 RequestDetails theRequest) { 2054 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 2055 if (theVersionFieldName != null) { 2056 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 2057 fieldsToLoadFromSpidxUriTable += ", NULL"; 2058 } 2059 2060 if (myPartitionSettings.isDatabasePartitionMode()) { 2061 if (theReverse) { 2062 fieldsToLoadFromSpidxUriTable += ", r.partition_id as " + PARTITION_ID_ALIAS; 2063 } else { 2064 fieldsToLoadFromSpidxUriTable += ", rUri.partition_id as " + PARTITION_ID_ALIAS; 2065 } 2066 } 2067 2068 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 2069 // But sp_name isn't indexed, so we use hash_identity instead. 2070 CanonicalUrlTargets canonicalUrlTargets = 2071 calculateIndexUriIdentityHashesForResourceTypes(theRequest, theTargetResourceTypes, theReverse); 2072 2073 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 2074 StringBuilder canonicalUrlQuery = new StringBuilder(); 2075 canonicalUrlQuery 2076 .append("SELECT ") 2077 .append(fieldsToLoadFromSpidxUriTable) 2078 .append(' '); 2079 canonicalUrlQuery.append("FROM hfj_res_link r "); 2080 2081 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 2082 canonicalUrlQuery.append("JOIN hfj_spidx_uri rUri ON ("); 2083 if (myPartitionSettings.isDatabasePartitionMode()) { 2084 canonicalUrlQuery.append("rUri.partition_id IN (:uri_partition_id) AND "); 2085 canonicalUriQueryParams.put("uri_partition_id", canonicalUrlTargets.myPartitionIds); 2086 } 2087 if (canonicalUrlTargets.myHashIdentityValues.size() == 1) { 2088 canonicalUrlQuery.append("rUri.hash_identity = :uri_identity_hash"); 2089 canonicalUriQueryParams.put( 2090 "uri_identity_hash", 2091 canonicalUrlTargets.myHashIdentityValues.iterator().next()); 2092 } else { 2093 canonicalUrlQuery.append("rUri.hash_identity in (:uri_identity_hashes)"); 2094 canonicalUriQueryParams.put("uri_identity_hashes", canonicalUrlTargets.myHashIdentityValues); 2095 } 2096 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri"); 2097 canonicalUrlQuery.append(")"); 2098 2099 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND"); 2100 canonicalUrlQuery.append(" r.target_resource_id IS NULL"); 2101 canonicalUrlQuery.append(" AND"); 2102 if (myPartitionSettings.isDatabasePartitionMode()) { 2103 if (theReverse) { 2104 canonicalUrlQuery.append(" rUri.partition_id"); 2105 } else { 2106 canonicalUrlQuery.append(" r.partition_id"); 2107 } 2108 canonicalUrlQuery.append(" = :search_partition_id"); 2109 canonicalUrlQuery.append(" AND"); 2110 } 2111 if (theReverse) { 2112 canonicalUrlQuery.append(" rUri.res_id"); 2113 } else { 2114 canonicalUrlQuery.append(" r.src_resource_id"); 2115 } 2116 canonicalUrlQuery.append(" IN (:target_pids)"); 2117 2118 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 2119 } 2120 2121 @Nonnull 2122 CanonicalUrlTargets calculateIndexUriIdentityHashesForResourceTypes( 2123 RequestDetails theRequestDetails, Set<String> theTargetResourceTypes, boolean theReverse) { 2124 Set<String> targetResourceTypes = theTargetResourceTypes; 2125 if (targetResourceTypes == null) { 2126 /* 2127 * If we don't have a 
list of valid target types, we need to figure out a list of all 2128 * possible target types in order to perform the search of the URI index table. This is 2129 * because the hash_identity column encodes the resource type, so we'll need a hash 2130 * value for each possible target type. 2131 */ 2132 targetResourceTypes = new HashSet<>(); 2133 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 2134 if (theReverse) { 2135 // For reverse includes, it is really hard to figure out what types 2136 // are actually potentially pointing to the type we're searching for 2137 // in this context, so let's just assume it could be anything. 2138 targetResourceTypes = possibleTypes; 2139 } else { 2140 for (var next : mySearchParamRegistry 2141 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 2142 .values() 2143 .stream() 2144 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 2145 .collect(Collectors.toList())) { 2146 2147 // If the reference points to a Reference (ie not a canonical or CanonicalReference) 2148 // then it doesn't matter here anyhow. The logic here only works for elements at the 2149 // root level of the document (e.g. QuestionnaireResponse.subject or 2150 // QuestionnaireResponse.subject.where(...)) but this is just an optimization 2151 // anyhow. 2152 if (next.getPath().startsWith(myResourceName + ".")) { 2153 String elementName = 2154 next.getPath().substring(next.getPath().indexOf('.') + 1); 2155 int secondDotIndex = elementName.indexOf('.'); 2156 if (secondDotIndex != -1) { 2157 elementName = elementName.substring(0, secondDotIndex); 2158 } 2159 BaseRuntimeChildDefinition child = 2160 myContext.getResourceDefinition(myResourceName).getChildByName(elementName); 2161 if (child != null) { 2162 BaseRuntimeElementDefinition<?> childDef = child.getChildByName(elementName); 2163 if (childDef != null) { 2164 if (childDef.getName().equals("Reference")) { 2165 continue; 2166 } 2167 } 2168 } 2169 } 2170 2171 if (!next.getTargets().isEmpty()) { 2172 // For each reference parameter on the resource type we're searching for, 2173 // add all the potential target types to the list of possible target 2174 // resource types we can look up. 
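// Illustrative example (hypothetical values, not from the original source): when searching
// Observation, its 'subject' reference parameter declares targets such as Patient, Group,
// Device and Location; each of those that also has a registered DAO is kept by the loop
// below. Further down, a hash identity is computed per kept type, roughly:
//   Long hash = BaseResourceIndexedSearchParam.calculateHashIdentity(
//           myPartitionSettings, readPartition, "Patient", "url");
// so the canonical-URL lookups against hfj_spidx_uri only consider rows belonging to
// plausible target resource types.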
2175 for (var nextTarget : next.getTargets()) { 2176 if (possibleTypes.contains(nextTarget)) { 2177 targetResourceTypes.add(nextTarget); 2178 } 2179 } 2180 } else { 2181 // If we have any references that don't define any target types, then 2182 // we need to assume that all enabled resource types are possible target 2183 // types 2184 targetResourceTypes.addAll(possibleTypes); 2185 break; 2186 } 2187 } 2188 } 2189 } 2190 assert !targetResourceTypes.isEmpty(); 2191 2192 Set<Long> hashIdentityValues = new HashSet<>(); 2193 Set<Integer> partitionIds = new HashSet<>(); 2194 for (String type : targetResourceTypes) { 2195 2196 RequestPartitionId readPartition; 2197 if (myPartitionSettings.isPartitioningEnabled()) { 2198 readPartition = 2199 myPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, type); 2200 } else { 2201 readPartition = RequestPartitionId.defaultPartition(); 2202 } 2203 if (readPartition.hasPartitionIds()) { 2204 partitionIds.addAll(readPartition.getPartitionIds()); 2205 } 2206 2207 Long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity( 2208 myPartitionSettings, readPartition, type, "url"); 2209 hashIdentityValues.add(hashIdentity); 2210 } 2211 2212 return new CanonicalUrlTargets(hashIdentityValues, partitionIds); 2213 } 2214 2215 static class CanonicalUrlTargets { 2216 2217 @Nonnull 2218 final Set<Long> myHashIdentityValues; 2219 2220 @Nonnull 2221 final Set<Integer> myPartitionIds; 2222 2223 public CanonicalUrlTargets(@Nonnull Set<Long> theHashIdentityValues, @Nonnull Set<Integer> thePartitionIds) { 2224 myHashIdentityValues = theHashIdentityValues; 2225 myPartitionIds = thePartitionIds; 2226 } 2227 } 2228 2229 /** 2230 * This method takes in a list of {@link JpaPid}'s and returns a series of sublists containing 2231 * those pids where: 2232 * <ul> 2233 * <li>No single list is most than {@literal theMaxLoad} entries</li> 2234 * <li>Each list only contains JpaPids with the same partition ID</li> 2235 * </ul> 2236 */ 2237 static List<Collection<JpaPid>> partitionBySizeAndPartitionId(List<JpaPid> theNextRoundMatches, int theMaxLoad) { 2238 2239 if (theNextRoundMatches.size() <= theMaxLoad) { 2240 boolean allSamePartition = true; 2241 for (int i = 1; i < theNextRoundMatches.size(); i++) { 2242 if (!Objects.equals( 2243 theNextRoundMatches.get(i - 1).getPartitionId(), 2244 theNextRoundMatches.get(i).getPartitionId())) { 2245 allSamePartition = false; 2246 break; 2247 } 2248 } 2249 if (allSamePartition) { 2250 return Collections.singletonList(theNextRoundMatches); 2251 } 2252 } 2253 2254 // Break into partitioned sublists 2255 ListMultimap<String, JpaPid> lists = 2256 MultimapBuilder.hashKeys().arrayListValues().build(); 2257 for (JpaPid nextRoundMatch : theNextRoundMatches) { 2258 String partitionId = nextRoundMatch.getPartitionId() != null 2259 ? 
nextRoundMatch.getPartitionId().toString() 2260 : ""; 2261 lists.put(partitionId, nextRoundMatch); 2262 } 2263 2264 List<Collection<JpaPid>> retVal = new ArrayList<>(); 2265 for (String key : lists.keySet()) { 2266 List<List<JpaPid>> nextPartition = Lists.partition(lists.get(key), theMaxLoad); 2267 retVal.addAll(nextPartition); 2268 } 2269 2270 // In unit test mode, we sort the results just for unit test predictability 2271 if (HapiSystemProperties.isUnitTestModeEnabled()) { 2272 retVal = retVal.stream() 2273 .map(t -> t.stream().sorted().collect(Collectors.toList())) 2274 .collect(Collectors.toList()); 2275 } 2276 2277 return retVal; 2278 } 2279 2280 private void attemptComboUniqueSpProcessing( 2281 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 2282 RuntimeSearchParam comboParam = null; 2283 List<String> comboParamNames = null; 2284 List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams( 2285 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2286 if (!exactMatchParams.isEmpty()) { 2287 comboParam = exactMatchParams.get(0); 2288 comboParamNames = new ArrayList<>(theParams.keySet()); 2289 } 2290 2291 if (comboParam == null) { 2292 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 2293 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2294 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2295 List<String> nextCandidateParamNames = 2296 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream() 2297 .map(RuntimeSearchParam::getName) 2298 .collect(Collectors.toList()); 2299 if (theParams.keySet().containsAll(nextCandidateParamNames)) { 2300 comboParam = nextCandidate; 2301 comboParamNames = nextCandidateParamNames; 2302 break; 2303 } 2304 } 2305 } 2306 2307 if (comboParam != null) { 2308 Collections.sort(comboParamNames); 2309 2310 // Since we're going to remove elements below 2311 theParams.values().forEach(this::ensureSubListsAreWritable); 2312 2313 /* 2314 * Apply search against the combo param index in a loop: 2315 * 2316 * 1. First we check whether the actual parameter values in the 2317 * parameter map are actually usable for searching against the combo 2318 * param index. E.g. no search modifiers, date comparators, etc., 2319 * since these mean you can't use the combo index. 2320 * 2321 * 2. Apply and create the join SQl. We remove parameter values from 2322 * the map as we apply them, so any parameter values remaining in the 2323 * map after each loop haven't yet been factored into the SQL. 2324 * 2325 * The loop allows us to create multiple combo index joins if there 2326 * are multiple AND expressions for the related parameters. 
2327 */ 2328 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2329 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2330 } 2331 } 2332 } 2333 2334 private void applyComboSearchParam( 2335 QueryStack theQueryStack, 2336 @Nonnull SearchParameterMap theParams, 2337 RequestDetails theRequest, 2338 List<String> theComboParamNames, 2339 RuntimeSearchParam theComboParam) { 2340 2341 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2342 for (String nextParamName : theComboParamNames) { 2343 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2344 inputs.add(nextValues); 2345 } 2346 2347 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2348 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2349 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2350 2351 StringBuilder searchStringBuilder = new StringBuilder(); 2352 searchStringBuilder.append(myResourceName); 2353 searchStringBuilder.append("?"); 2354 2355 boolean first = true; 2356 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2357 2358 String nextParamName = theComboParamNames.get(paramIndex); 2359 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2360 // The only prefix accepted when combo searching is 'eq' (see validateParamValuesAreValidForComboParam). 2361 // As a result, we strip the prefix if present. 2362 String nextOrValue = stripStart(nextOr.getValueAsQueryToken(myContext), EQUAL.getValue()); 2363 2364 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2365 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2366 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2367 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2368 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2369 } 2370 } 2371 2372 if (first) { 2373 first = false; 2374 } else { 2375 searchStringBuilder.append('&'); 2376 } 2377 2378 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2379 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2380 2381 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2382 } 2383 2384 String indexString = searchStringBuilder.toString(); 2385 ourLog.debug( 2386 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2387 2388 indexStrings.add(indexString); 2389 } 2390 2391 // Just to make sure we're stable for tests 2392 indexStrings.sort(Comparator.naturalOrder()); 2393 2394 // Interceptor broadcast: JPA_PERFTRACE_INFO 2395 IInterceptorBroadcaster compositeBroadcaster = 2396 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2397 if (compositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_INFO)) { 2398 String indexStringForLog = indexStrings.size() > 1 ? 
indexStrings.toString() : indexStrings.get(0); 2399 StorageProcessingMessage msg = new StorageProcessingMessage() 2400 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2401 + indexStringForLog); 2402 HookParams params = new HookParams() 2403 .add(RequestDetails.class, theRequest) 2404 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2405 .add(StorageProcessingMessage.class, msg); 2406 compositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_INFO, params); 2407 } 2408 2409 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2410 case UNIQUE: 2411 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2412 break; 2413 case NON_UNIQUE: 2414 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2415 break; 2416 } 2417 2418 // Remove any empty parameters remaining after this 2419 theParams.clean(); 2420 } 2421 2422 /** 2423 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2424 * searching against a combo param with the given parameter names. This might be {@literal false} if 2425 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2426 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2427 */ 2428 private boolean validateParamValuesAreValidForComboParam( 2429 RequestDetails theRequest, 2430 @Nonnull SearchParameterMap theParams, 2431 List<String> theComboParamNames, 2432 RuntimeSearchParam theComboParam) { 2433 boolean paramValuesAreValidForCombo = true; 2434 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2435 2436 for (String nextParamName : theComboParamNames) { 2437 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2438 2439 if (nextValues == null || nextValues.isEmpty()) { 2440 paramValuesAreValidForCombo = false; 2441 break; 2442 } 2443 2444 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2445 paramOrValues.add(nextAndValue); 2446 2447 for (IQueryParameterType nextOrValue : nextAndValue) { 2448 if (nextOrValue instanceof DateParam) { 2449 DateParam dateParam = (DateParam) nextOrValue; 2450 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2451 String message = "Search with params " + theComboParamNames 2452 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2453 + nextParamName + "'"; 2454 firePerformanceInfo(theRequest, message); 2455 paramValuesAreValidForCombo = false; 2456 break; 2457 } 2458 } 2459 if (nextOrValue instanceof BaseParamWithPrefix) { 2460 BaseParamWithPrefix<?> paramWithPrefix = (BaseParamWithPrefix<?>) nextOrValue; 2461 ParamPrefixEnum prefix = paramWithPrefix.getPrefix(); 2462 // A parameter with the 'eq' prefix is the only accepted prefix when combo searching since 2463 // birthdate=2025-01-01 and birthdate=eq2025-01-01 are equivalent searches. 
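// Illustrative examples (hypothetical combo parameter, not from the original source): with a
// combo search parameter defined on (family, birthdate), a request such as
//   Patient?family=Smith&birthdate=2021-03-04
// or the equivalent
//   Patient?family=Smith&birthdate=eq2021-03-04
// remains a candidate for the combo index, whereas
//   Patient?family:exact=Smith&birthdate=ge2021-03-04
// falls back to the regular per-parameter joins because of the ':exact' modifier and the
// 'ge' prefix rejected below.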
2464 if (prefix != null && prefix != EQUAL) { 2465 String message = "Search with params " + theComboParamNames 2466 + " is not a candidate for combo searching - Parameter '" + nextParamName 2467 + "' has prefix: '" 2468 + paramWithPrefix.getPrefix().getValue() + "'"; 2469 firePerformanceInfo(theRequest, message); 2470 paramValuesAreValidForCombo = false; 2471 break; 2472 } 2473 } 2474 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2475 String message = "Search with params " + theComboParamNames 2476 + " is not a candidate for combo searching - Parameter '" + nextParamName 2477 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2478 firePerformanceInfo(theRequest, message); 2479 paramValuesAreValidForCombo = false; 2480 break; 2481 } 2482 } 2483 2484 // Reference params are only eligible for using a composite index if they 2485 // are qualified 2486 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2487 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2488 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2489 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2490 if (isBlank(param.getResourceType())) { 2491 ourLog.debug( 2492 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2493 paramValuesAreValidForCombo = false; 2494 break; 2495 } 2496 } 2497 2498 // Date params are not eligible for using composite unique index 2499 // as index could contain date with different precision (e.g. DAY, SECOND) 2500 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2501 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2502 ourLog.debug( 2503 "Search with params {} is not a candidate for combo searching - " 2504 + "Unique combo search parameter '{}' has DATE type", 2505 theComboParamNames, 2506 nextParamName); 2507 paramValuesAreValidForCombo = false; 2508 break; 2509 } 2510 } 2511 2512 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2513 ourLog.debug( 2514 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2515 paramValuesAreValidForCombo = false; 2516 } 2517 2518 return paramValuesAreValidForCombo; 2519 } 2520 2521 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2522 for (int i = 0; i < theListOfLists.size(); i++) { 2523 List<T> oldSubList = theListOfLists.get(i); 2524 if (!(oldSubList instanceof ArrayList)) { 2525 List<T> newSubList = new ArrayList<>(oldSubList); 2526 theListOfLists.set(i, newSubList); 2527 } 2528 } 2529 } 2530 2531 @Override 2532 public void setFetchSize(int theFetchSize) { 2533 myFetchSize = theFetchSize; 2534 } 2535 2536 public SearchParameterMap getParams() { 2537 return myParams; 2538 } 2539 2540 public CriteriaBuilder getBuilder() { 2541 return myCriteriaBuilder; 2542 } 2543 2544 public Class<? 
extends IBaseResource> getResourceType() { 2545 return myResourceType; 2546 } 2547 2548 public String getResourceName() { 2549 return myResourceName; 2550 } 2551 2552 /** 2553 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2554 */ 2555 public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2556 2557 private final RequestDetails myRequest; 2558 private final Set<JpaPid> myCurrentPids; 2559 private Iterator<JpaPid> myCurrentIterator; 2560 private JpaPid myNext; 2561 2562 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2563 myCurrentPids = new HashSet<>(thePidSet); 2564 myCurrentIterator = null; 2565 myRequest = theRequest; 2566 } 2567 2568 private void fetchNext() { 2569 while (myNext == null) { 2570 2571 if (myCurrentIterator == null) { 2572 Set<Include> includes = new HashSet<>(); 2573 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2574 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2575 for (IQueryParameterType type : typeList) { 2576 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2577 for (String resourceType : queryString.split(",")) { 2578 String rt = resourceType.trim(); 2579 if (isNotBlank(rt)) { 2580 includes.add(new Include(rt + ":*", true)); 2581 } 2582 } 2583 } 2584 } 2585 } 2586 if (includes.isEmpty()) { 2587 includes.add(new Include("*", true)); 2588 } 2589 Set<JpaPid> newPids = loadIncludes( 2590 myContext, 2591 myEntityManager, 2592 myCurrentPids, 2593 includes, 2594 false, 2595 getParams().getLastUpdated(), 2596 mySearchUuid, 2597 myRequest, 2598 null); 2599 myCurrentIterator = newPids.iterator(); 2600 } 2601 2602 if (myCurrentIterator.hasNext()) { 2603 myNext = myCurrentIterator.next(); 2604 } else { 2605 myNext = NO_MORE; 2606 } 2607 } 2608 } 2609 2610 @Override 2611 public boolean hasNext() { 2612 fetchNext(); 2613 return !NO_MORE.equals(myNext); 2614 } 2615 2616 @Override 2617 public JpaPid next() { 2618 fetchNext(); 2619 JpaPid retVal = myNext; 2620 myNext = null; 2621 return retVal; 2622 } 2623 } 2624 2625 /** 2626 * Basic Query iterator, used to fetch the results of a query. 2627 */ 2628 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2629 2630 private final SearchRuntimeDetails mySearchRuntimeDetails; 2631 private final RequestDetails myRequest; 2632 private final boolean myHaveRawSqlHooks; 2633 private final boolean myHavePerfTraceFoundIdHook; 2634 private final SortSpec mySort; 2635 private final Integer myOffset; 2636 private final IInterceptorBroadcaster myCompositeBroadcaster; 2637 private boolean myFirst = true; 2638 private IncludesIterator myIncludesIterator; 2639 /** 2640 * The next JpaPid value of the next result in this query. 2641 * Will not be null if fetched using getNext() 2642 */ 2643 private JpaPid myNext; 2644 /** 2645 * The current query result iterator running sql and supplying PIDs 2646 * @see #myQueryList 2647 */ 2648 private ISearchQueryExecutor myResultsIterator; 2649 2650 private boolean myFetchIncludesForEverythingOperation; 2651 /** 2652 * The count of resources skipped because they were seen in earlier results 2653 */ 2654 private int mySkipCount = 0; 2655 /** 2656 * The count of resources that are new in this search 2657 * (ie, not cached in previous searches) 2658 */ 2659 private int myNonSkipCount = 0; 2660 2661 /** 2662 * The list of queries to use to find all results. 
2663 * Normal JPA queries will normally have a single entry. 2664 * Queries that involve Hibernate Search/Elastisearch may have 2665 * multiple queries because of chunking. 2666 * The $everything operation also jams some extra results in. 2667 */ 2668 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2669 2670 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2671 mySearchRuntimeDetails = theSearchRuntimeDetails; 2672 mySort = myParams.getSort(); 2673 myOffset = myParams.getOffset(); 2674 myRequest = theRequest; 2675 myCompositeBroadcaster = 2676 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2677 2678 // everything requires fetching recursively all related resources 2679 if (myParams.getEverythingMode() != null) { 2680 myFetchIncludesForEverythingOperation = true; 2681 } 2682 2683 myHavePerfTraceFoundIdHook = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID); 2684 myHaveRawSqlHooks = myCompositeBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_RAW_SQL); 2685 } 2686 2687 private void fetchNext() { 2688 try { 2689 if (myHaveRawSqlHooks) { 2690 CurrentThreadCaptureQueriesListener.startCapturing(); 2691 } 2692 2693 // If we don't have a query yet, create one 2694 if (myResultsIterator == null) { 2695 if (!mySearchProperties.hasMaxResultsRequested()) { 2696 mySearchProperties.setMaxResultsRequested(calculateMaxResultsToFetch()); 2697 } 2698 2699 /* 2700 * assigns the results iterator 2701 * and populates the myQueryList. 2702 */ 2703 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2704 } 2705 2706 if (myNext == null) { 2707 // no next means we need a new query (if one is available) 2708 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2709 /* 2710 * Because we combine our DB searches with Lucene 2711 * sometimes we can have multiple results iterators 2712 * (with only some having data in them to extract). 2713 * 2714 * We'll iterate our results iterators until we 2715 * either run out of results iterators, or we 2716 * have one that actually has data in it. 2717 */ 2718 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2719 retrieveNextIteratorQuery(); 2720 } 2721 2722 if (!myResultsIterator.hasNext()) { 2723 // we couldn't find a results iterator; 2724 // we're done here 2725 break; 2726 } 2727 2728 JpaPid nextPid = myResultsIterator.next(); 2729 if (myHavePerfTraceFoundIdHook) { 2730 callPerformanceTracingHook(nextPid); 2731 } 2732 2733 if (nextPid != null) { 2734 if (!myPidSet.contains(nextPid)) { 2735 if (!mySearchProperties.isDeduplicateInDatabase()) { 2736 /* 2737 * We only add to the map if we aren't fetching "everything"; 2738 * otherwise, we let the de-duplication happen in the database 2739 * (see createChunkedQueryNormalSearch above), because it 2740 * saves memory that way. 
2741 */ 2742 myPidSet.add(nextPid); 2743 } 2744 if (doNotSkipNextPidForEverything()) { 2745 myNext = nextPid; 2746 myNonSkipCount++; 2747 break; 2748 } 2749 } else { 2750 mySkipCount++; 2751 } 2752 } 2753 2754 if (!myResultsIterator.hasNext()) { 2755 if (mySearchProperties.hasMaxResultsRequested() 2756 && (mySkipCount + myNonSkipCount == mySearchProperties.getMaxResultsRequested())) { 2757 if (mySkipCount > 0 && myNonSkipCount == 0) { 2758 sendProcessingMsgAndFirePerformanceHook(); 2759 // need the next iterator; increase the maxsize 2760 // (we should always do this) 2761 int maxResults = mySearchProperties.getMaxResultsRequested() + 1000; 2762 mySearchProperties.setMaxResultsRequested(maxResults); 2763 2764 if (!mySearchProperties.isDeduplicateInDatabase()) { 2765 // if we're not using the database to deduplicate 2766 // we should recheck our memory usage 2767 // the prefetch size check is future proofing 2768 int prefetchSize = myStorageSettings 2769 .getSearchPreFetchThresholds() 2770 .size(); 2771 if (prefetchSize > 0) { 2772 if (myStorageSettings 2773 .getSearchPreFetchThresholds() 2774 .get(prefetchSize - 1) 2775 < mySearchProperties.getMaxResultsRequested()) { 2776 mySearchProperties.setDeduplicateInDatabase(true); 2777 } 2778 } 2779 } 2780 2781 initializeIteratorQuery(myOffset, mySearchProperties.getMaxResultsRequested()); 2782 } 2783 } 2784 } 2785 } 2786 } 2787 2788 if (myNext == null) { 2789 // if we got here, it means the current JpaPid has already been processed, 2790 // and we will decide (here) if we need to fetch related resources recursively 2791 if (myFetchIncludesForEverythingOperation) { 2792 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2793 myFetchIncludesForEverythingOperation = false; 2794 } 2795 if (myIncludesIterator != null) { 2796 while (myIncludesIterator.hasNext()) { 2797 JpaPid next = myIncludesIterator.next(); 2798 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2799 myNext = next; 2800 break; 2801 } 2802 } 2803 if (myNext == null) { 2804 myNext = NO_MORE; 2805 } 2806 } else { 2807 myNext = NO_MORE; 2808 } 2809 } 2810 2811 if (!mySearchProperties.hasMaxResultsRequested()) { 2812 mySearchRuntimeDetails.setFoundIndexMatchesCount(myNonSkipCount); 2813 } else { 2814 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2815 } 2816 2817 } finally { 2818 // search finished - fire hooks 2819 if (myHaveRawSqlHooks) { 2820 callRawSqlHookWithCurrentThreadQueries(myRequest, myCompositeBroadcaster); 2821 } 2822 } 2823 2824 if (myFirst) { 2825 HookParams params = new HookParams() 2826 .add(RequestDetails.class, myRequest) 2827 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2828 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2829 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2830 myFirst = false; 2831 } 2832 2833 if (NO_MORE.equals(myNext)) { 2834 HookParams params = new HookParams() 2835 .add(RequestDetails.class, myRequest) 2836 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2837 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2838 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2839 } 2840 } 2841 2842 private Integer calculateMaxResultsToFetch() { 2843 if (myParams.getLoadSynchronousUpTo() != null) { 2844 return myParams.getLoadSynchronousUpTo(); 2845 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2846 return myParams.getEverythingMode() != null 2847 ? 
myParams.getOffset() + myParams.getCount() 2848 : myParams.getCount(); 2849 } else { 2850 return myStorageSettings.getFetchSizeDefaultMaximum(); 2851 } 2852 } 2853 2854 private boolean doNotSkipNextPidForEverything() { 2855 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2856 } 2857 2858 private void callPerformanceTracingHook(JpaPid theNextPid) { 2859 HookParams params = new HookParams() 2860 .add(Integer.class, System.identityHashCode(this)) 2861 .add(Object.class, theNextPid); 2862 myCompositeBroadcaster.callHooks(Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2863 } 2864 2865 private void sendProcessingMsgAndFirePerformanceHook() { 2866 String msg = "Pass completed with no matching results seeking rows " 2867 + myPidSet.size() + "-" + mySkipCount 2868 + ". This indicates an inefficient query! Retrying with new max count of " 2869 + mySearchProperties.getMaxResultsRequested(); 2870 firePerformanceWarning(myRequest, msg); 2871 } 2872 2873 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2874 Integer offset = theOffset; 2875 if (myQueryList.isEmpty()) { 2876 // Capture times for Lucene/Elasticsearch queries as well 2877 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2878 2879 // setting offset to 0 to fetch all resource ids to guarantee 2880 // correct output result for everything operation during paging 2881 if (myParams.getEverythingMode() != null) { 2882 offset = 0; 2883 } 2884 2885 SearchQueryProperties properties = mySearchProperties.clone(); 2886 properties 2887 .setOffset(offset) 2888 .setMaxResultsRequested(theMaxResultsToFetch) 2889 .setDoCountOnlyFlag(false) 2890 .setDeduplicateInDatabase(properties.isDeduplicateInDatabase() || offset != null); 2891 myQueryList = createQuery(myParams, properties, myRequest, mySearchRuntimeDetails); 2892 } 2893 2894 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2895 2896 retrieveNextIteratorQuery(); 2897 2898 mySkipCount = 0; 2899 myNonSkipCount = 0; 2900 } 2901 2902 private void retrieveNextIteratorQuery() { 2903 close(); 2904 if (isNotEmpty(myQueryList)) { 2905 myResultsIterator = myQueryList.remove(0); 2906 myHasNextIteratorQuery = true; 2907 } else { 2908 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2909 myHasNextIteratorQuery = false; 2910 } 2911 } 2912 2913 @Override 2914 public boolean hasNext() { 2915 if (myNext == null) { 2916 fetchNext(); 2917 } 2918 return !NO_MORE.equals(myNext); 2919 } 2920 2921 @Override 2922 public JpaPid next() { 2923 fetchNext(); 2924 JpaPid retVal = myNext; 2925 myNext = null; 2926 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2927 return retVal; 2928 } 2929 2930 @Override 2931 public int getSkippedCount() { 2932 return mySkipCount; 2933 } 2934 2935 @Override 2936 public int getNonSkippedCount() { 2937 return myNonSkipCount; 2938 } 2939 2940 @Override 2941 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2942 Collection<JpaPid> batch = new ArrayList<>(); 2943 while (this.hasNext() && batch.size() < theBatchSize) { 2944 batch.add(this.next()); 2945 } 2946 return batch; 2947 } 2948 2949 @Override 2950 public void close() { 2951 if (myResultsIterator != null) { 2952 myResultsIterator.close(); 2953 } 2954 myResultsIterator = null; 2955 } 2956 } 2957 2958 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2959 // Only log at debug level since these messages aren't considered important enough 2960 // that we should be 
cluttering the system log, but they are important to the 2961 // specific query being executed, so we'll log them at INFO level there 2962 ourLog.debug(theMessage); 2963 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO); 2964 } 2965 2966 private void firePerformanceWarning(RequestDetails theRequest, String theMessage) { 2967 ourLog.warn(theMessage); 2968 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING); 2969 } 2970 2971 private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut thePointcut) { 2972 IInterceptorBroadcaster compositeBroadcaster = 2973 CompositeInterceptorBroadcaster.newCompositeBroadcaster(myInterceptorBroadcaster, theRequest); 2974 if (compositeBroadcaster.hasHooks(thePointcut)) { 2975 StorageProcessingMessage message = new StorageProcessingMessage(); 2976 message.setMessage(theMessage); 2977 HookParams params = new HookParams() 2978 .add(RequestDetails.class, theRequest) 2979 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2980 .add(StorageProcessingMessage.class, message); 2981 compositeBroadcaster.callHooks(thePointcut, params); 2982 } 2983 } 2984 2985 public static int getMaximumPageSize() { 2986 if (myMaxPageSizeForTests != null) { 2987 return myMaxPageSizeForTests; 2988 } 2989 return MAXIMUM_PAGE_SIZE; 2990 } 2991 2992 public static void setMaxPageSizeForTest(Integer theTestSize) { 2993 myMaxPageSizeForTests = theTestSize; 2994 } 2995}
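/*
 * Usage sketch (hypothetical caller, for illustration only; the variable names are invented):
 * resolving _include targets for a set of already-matched pids via the parameter-object
 * overload of loadIncludes() above. The returned set contains only the newly added pids;
 * the original matches are removed (and interceptors registered against
 * STORAGE_PREACCESS_RESOURCES may filter it further) before it is returned.
 *
 *   SearchBuilderLoadIncludesParameters<JpaPid> params = new SearchBuilderLoadIncludesParameters<>();
 *   params.setFhirContext(fhirContext);
 *   params.setEntityManager(entityManager);
 *   params.setMatches(matchedPids);
 *   params.setIncludeFilters(Set.of(new Include("Observation:subject", false)));
 *   params.setReverseMode(false);                    // false = _include, true = _revinclude
 *   params.setLastUpdated(null);
 *   params.setSearchIdOrDescription("example-search");
 *   params.setRequestDetails(requestDetails);
 *   params.setMaxCount(1000);
 *   Set<JpaPid> includedPids = searchBuilder.loadIncludes(params);
 */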