001/* 002 * #%L 003 * HAPI FHIR JPA Server 004 * %% 005 * Copyright (C) 2014 - 2024 Smile CDR, Inc. 006 * %% 007 * Licensed under the Apache License, Version 2.0 (the "License"); 008 * you may not use this file except in compliance with the License. 009 * You may obtain a copy of the License at 010 * 011 * http://www.apache.org/licenses/LICENSE-2.0 012 * 013 * Unless required by applicable law or agreed to in writing, software 014 * distributed under the License is distributed on an "AS IS" BASIS, 015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 016 * See the License for the specific language governing permissions and 017 * limitations under the License. 018 * #L% 019 */ 020package ca.uhn.fhir.jpa.search.builder; 021 022import ca.uhn.fhir.context.BaseRuntimeChildDefinition; 023import ca.uhn.fhir.context.BaseRuntimeElementDefinition; 024import ca.uhn.fhir.context.ComboSearchParamType; 025import ca.uhn.fhir.context.FhirContext; 026import ca.uhn.fhir.context.FhirVersionEnum; 027import ca.uhn.fhir.context.RuntimeResourceDefinition; 028import ca.uhn.fhir.context.RuntimeSearchParam; 029import ca.uhn.fhir.i18n.Msg; 030import ca.uhn.fhir.interceptor.api.HookParams; 031import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; 032import ca.uhn.fhir.interceptor.api.Pointcut; 033import ca.uhn.fhir.interceptor.model.RequestPartitionId; 034import ca.uhn.fhir.jpa.api.config.JpaStorageSettings; 035import ca.uhn.fhir.jpa.api.dao.DaoRegistry; 036import ca.uhn.fhir.jpa.api.dao.IDao; 037import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; 038import ca.uhn.fhir.jpa.api.svc.IIdHelperService; 039import ca.uhn.fhir.jpa.api.svc.ResolveIdentityMode; 040import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; 041import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; 042import ca.uhn.fhir.jpa.dao.BaseStorageDao; 043import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; 044import ca.uhn.fhir.jpa.dao.IJpaStorageResourceParser; 045import ca.uhn.fhir.jpa.dao.IResultIterator; 046import ca.uhn.fhir.jpa.dao.ISearchBuilder; 047import ca.uhn.fhir.jpa.dao.data.IResourceSearchViewDao; 048import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; 049import ca.uhn.fhir.jpa.dao.search.ResourceNotFoundInIndexException; 050import ca.uhn.fhir.jpa.entity.ResourceSearchView; 051import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails; 052import ca.uhn.fhir.jpa.model.config.PartitionSettings; 053import ca.uhn.fhir.jpa.model.cross.IResourceLookup; 054import ca.uhn.fhir.jpa.model.dao.JpaPid; 055import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; 056import ca.uhn.fhir.jpa.model.entity.IBaseResourceEntity; 057import ca.uhn.fhir.jpa.model.entity.ResourceTag; 058import ca.uhn.fhir.jpa.model.search.SearchBuilderLoadIncludesParameters; 059import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails; 060import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage; 061import ca.uhn.fhir.jpa.search.SearchConstants; 062import ca.uhn.fhir.jpa.search.builder.models.ResolvedSearchQueryExecutor; 063import ca.uhn.fhir.jpa.search.builder.sql.GeneratedSql; 064import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; 065import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor; 066import ca.uhn.fhir.jpa.search.builder.sql.SqlObjectFactory; 067import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc; 068import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; 069import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper; 070import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil; 071import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper; 072import ca.uhn.fhir.jpa.util.BaseIterator; 073import ca.uhn.fhir.jpa.util.CartesianProductUtil; 074import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; 075import ca.uhn.fhir.jpa.util.QueryChunker; 076import ca.uhn.fhir.jpa.util.SqlQueryList; 077import ca.uhn.fhir.model.api.IQueryParameterType; 078import ca.uhn.fhir.model.api.Include; 079import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; 080import ca.uhn.fhir.model.api.TemporalPrecisionEnum; 081import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum; 082import ca.uhn.fhir.rest.api.Constants; 083import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum; 084import ca.uhn.fhir.rest.api.SearchContainedModeEnum; 085import ca.uhn.fhir.rest.api.SortOrderEnum; 086import ca.uhn.fhir.rest.api.SortSpec; 087import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails; 088import ca.uhn.fhir.rest.api.server.RequestDetails; 089import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId; 090import ca.uhn.fhir.rest.param.BaseParamWithPrefix; 091import ca.uhn.fhir.rest.param.DateParam; 092import ca.uhn.fhir.rest.param.DateRangeParam; 093import ca.uhn.fhir.rest.param.ParameterUtil; 094import ca.uhn.fhir.rest.param.ReferenceParam; 095import ca.uhn.fhir.rest.param.StringParam; 096import ca.uhn.fhir.rest.param.TokenParam; 097import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; 098import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; 099import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; 100import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster; 101import ca.uhn.fhir.rest.server.util.ISearchParamRegistry; 102import ca.uhn.fhir.util.StopWatch; 103import ca.uhn.fhir.util.StringUtil; 104import ca.uhn.fhir.util.UrlUtil; 105import com.google.common.annotations.VisibleForTesting; 106import com.google.common.collect.Lists; 107import com.healthmarketscience.sqlbuilder.Condition; 108import jakarta.annotation.Nonnull; 109import jakarta.annotation.Nullable; 110import jakarta.persistence.EntityManager; 111import jakarta.persistence.PersistenceContext; 112import jakarta.persistence.PersistenceContextType; 113import jakarta.persistence.Query; 114import jakarta.persistence.Tuple; 115import jakarta.persistence.TypedQuery; 116import jakarta.persistence.criteria.CriteriaBuilder; 117import org.apache.commons.lang3.StringUtils; 118import org.apache.commons.lang3.Validate; 119import org.apache.commons.lang3.math.NumberUtils; 120import org.apache.commons.lang3.tuple.Pair; 121import org.hl7.fhir.instance.model.api.IAnyResource; 122import org.hl7.fhir.instance.model.api.IBaseResource; 123import org.hl7.fhir.instance.model.api.IIdType; 124import org.slf4j.Logger; 125import org.slf4j.LoggerFactory; 126import org.springframework.beans.factory.annotation.Autowired; 127import org.springframework.jdbc.core.JdbcTemplate; 128import org.springframework.jdbc.core.RowMapper; 129import org.springframework.transaction.support.TransactionSynchronizationManager; 130 131import java.sql.ResultSet; 132import java.sql.SQLException; 133import java.util.ArrayList; 134import java.util.Collection; 135import java.util.Collections; 136import java.util.Comparator; 137import java.util.HashMap; 138import java.util.HashSet; 139import java.util.Iterator; 140import java.util.LinkedList; 141import java.util.List; 142import java.util.Map; 143import java.util.Objects; 144import java.util.Set; 145import java.util.stream.Collectors; 146 147import static ca.uhn.fhir.jpa.model.util.JpaConstants.UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE; 148import static ca.uhn.fhir.jpa.search.builder.QueryStack.LOCATION_POSITION; 149import static ca.uhn.fhir.jpa.search.builder.QueryStack.SearchForIdsParams.with; 150import static ca.uhn.fhir.jpa.util.InClauseNormalizer.normalizeIdListForInClause; 151import static java.util.Objects.requireNonNull; 152import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; 153import static org.apache.commons.lang3.StringUtils.defaultString; 154import static org.apache.commons.lang3.StringUtils.isBlank; 155import static org.apache.commons.lang3.StringUtils.isNotBlank; 156 157/** 158 * The SearchBuilder is responsible for actually forming the SQL query that handles 159 * searches for resources 160 */ 161public class SearchBuilder implements ISearchBuilder<JpaPid> { 162 163 /** 164 * See loadResourcesByPid 165 * for an explanation of why we use the constant 800 166 */ 167 // NB: keep public 168 @Deprecated 169 public static final int MAXIMUM_PAGE_SIZE = SearchConstants.MAX_PAGE_SIZE; 170 171 public static final String RESOURCE_ID_ALIAS = "resource_id"; 172 public static final String RESOURCE_VERSION_ALIAS = "resource_version"; 173 private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class); 174 private static final JpaPid NO_MORE = JpaPid.fromId(-1L); 175 private static final String MY_TARGET_RESOURCE_PID = "myTargetResourcePid"; 176 private static final String MY_SOURCE_RESOURCE_PID = "mySourceResourcePid"; 177 private static final String MY_TARGET_RESOURCE_TYPE = "myTargetResourceType"; 178 private static final String MY_SOURCE_RESOURCE_TYPE = "mySourceResourceType"; 179 private static final String MY_TARGET_RESOURCE_VERSION = "myTargetResourceVersion"; 180 public static Integer myMaxPageSizeForTests = null; 181 protected final IInterceptorBroadcaster myInterceptorBroadcaster; 182 protected final IResourceTagDao myResourceTagDao; 183 private String myResourceName; 184 private final Class<? extends IBaseResource> myResourceType; 185 private final HapiFhirLocalContainerEntityManagerFactoryBean myEntityManagerFactory; 186 private final SqlObjectFactory mySqlBuilderFactory; 187 private final HibernatePropertiesProvider myDialectProvider; 188 private final ISearchParamRegistry mySearchParamRegistry; 189 private final PartitionSettings myPartitionSettings; 190 private final DaoRegistry myDaoRegistry; 191 private final IResourceSearchViewDao myResourceSearchViewDao; 192 private final FhirContext myContext; 193 private final IIdHelperService<JpaPid> myIdHelperService; 194 private final JpaStorageSettings myStorageSettings; 195 private final IDao myCallingDao; 196 197 @PersistenceContext(type = PersistenceContextType.TRANSACTION) 198 protected EntityManager myEntityManager; 199 200 private CriteriaBuilder myCriteriaBuilder; 201 private SearchParameterMap myParams; 202 private String mySearchUuid; 203 private int myFetchSize; 204 private Integer myMaxResultsToFetch; 205 private Set<JpaPid> myPidSet; 206 private boolean myHasNextIteratorQuery = false; 207 private RequestPartitionId myRequestPartitionId; 208 209 @Autowired(required = false) 210 private IFulltextSearchSvc myFulltextSearchSvc; 211 212 @Autowired(required = false) 213 private IElasticsearchSvc myIElasticsearchSvc; 214 215 @Autowired 216 private IJpaStorageResourceParser myJpaStorageResourceParser; 217 218 /** 219 * Constructor 220 */ 221 @SuppressWarnings({"rawtypes", "unchecked"}) 222 public SearchBuilder( 223 IDao theDao, 224 String theResourceName, 225 JpaStorageSettings theStorageSettings, 226 HapiFhirLocalContainerEntityManagerFactoryBean theEntityManagerFactory, 227 SqlObjectFactory theSqlBuilderFactory, 228 HibernatePropertiesProvider theDialectProvider, 229 ISearchParamRegistry theSearchParamRegistry, 230 PartitionSettings thePartitionSettings, 231 IInterceptorBroadcaster theInterceptorBroadcaster, 232 IResourceTagDao theResourceTagDao, 233 DaoRegistry theDaoRegistry, 234 IResourceSearchViewDao theResourceSearchViewDao, 235 FhirContext theContext, 236 IIdHelperService theIdHelperService, 237 Class<? extends IBaseResource> theResourceType) { 238 myCallingDao = theDao; 239 myResourceName = theResourceName; 240 myResourceType = theResourceType; 241 myStorageSettings = theStorageSettings; 242 243 myEntityManagerFactory = theEntityManagerFactory; 244 mySqlBuilderFactory = theSqlBuilderFactory; 245 myDialectProvider = theDialectProvider; 246 mySearchParamRegistry = theSearchParamRegistry; 247 myPartitionSettings = thePartitionSettings; 248 myInterceptorBroadcaster = theInterceptorBroadcaster; 249 myResourceTagDao = theResourceTagDao; 250 myDaoRegistry = theDaoRegistry; 251 myResourceSearchViewDao = theResourceSearchViewDao; 252 myContext = theContext; 253 myIdHelperService = theIdHelperService; 254 } 255 256 @VisibleForTesting 257 void setResourceName(String theName) { 258 myResourceName = theName; 259 } 260 261 @Override 262 public void setMaxResultsToFetch(Integer theMaxResultsToFetch) { 263 myMaxResultsToFetch = theMaxResultsToFetch; 264 } 265 266 private void searchForIdsWithAndOr( 267 SearchQueryBuilder theSearchSqlBuilder, 268 QueryStack theQueryStack, 269 @Nonnull SearchParameterMap theParams, 270 RequestDetails theRequest) { 271 myParams = theParams; 272 273 // Remove any empty parameters 274 theParams.clean(); 275 276 // For DSTU3, pull out near-distance first so when it comes time to evaluate near, we already know the distance 277 if (myContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) { 278 Dstu3DistanceHelper.setNearDistance(myResourceType, theParams); 279 } 280 281 // Attempt to lookup via composite unique key. 282 if (isCompositeUniqueSpCandidate()) { 283 attemptComboUniqueSpProcessing(theQueryStack, theParams, theRequest); 284 } 285 286 // Handle _id and _tag last, since they can typically be tacked onto a different parameter 287 List<String> paramNames = myParams.keySet().stream() 288 .filter(t -> !t.equals(IAnyResource.SP_RES_ID)) 289 .filter(t -> !t.equals(Constants.PARAM_TAG)) 290 .collect(Collectors.toList()); 291 if (myParams.containsKey(IAnyResource.SP_RES_ID)) { 292 paramNames.add(IAnyResource.SP_RES_ID); 293 } 294 if (myParams.containsKey(Constants.PARAM_TAG)) { 295 paramNames.add(Constants.PARAM_TAG); 296 } 297 298 // Handle each parameter 299 for (String nextParamName : paramNames) { 300 if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) { 301 // Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by 302 // Elasticsearch 303 continue; 304 } 305 List<List<IQueryParameterType>> andOrParams = myParams.get(nextParamName); 306 Condition predicate = theQueryStack.searchForIdsWithAndOr(with().setResourceName(myResourceName) 307 .setParamName(nextParamName) 308 .setAndOrParams(andOrParams) 309 .setRequest(theRequest) 310 .setRequestPartitionId(myRequestPartitionId)); 311 if (predicate != null) { 312 theSearchSqlBuilder.addPredicate(predicate); 313 } 314 } 315 } 316 317 /** 318 * A search is a candidate for Composite Unique SP if unique indexes are enabled, there is no EverythingMode, and the 319 * parameters all have no modifiers. 320 */ 321 private boolean isCompositeUniqueSpCandidate() { 322 return myStorageSettings.isUniqueIndexesEnabled() && myParams.getEverythingMode() == null; 323 } 324 325 @SuppressWarnings("ConstantConditions") 326 @Override 327 public Long createCountQuery( 328 SearchParameterMap theParams, 329 String theSearchUuid, 330 RequestDetails theRequest, 331 @Nonnull RequestPartitionId theRequestPartitionId) { 332 333 assert theRequestPartitionId != null; 334 assert TransactionSynchronizationManager.isActualTransactionActive(); 335 336 init(theParams, theSearchUuid, theRequestPartitionId); 337 338 if (checkUseHibernateSearch()) { 339 return myFulltextSearchSvc.count(myResourceName, theParams.clone()); 340 } 341 342 List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null); 343 if (queries.isEmpty()) { 344 return 0L; 345 } else { 346 return queries.get(0).next(); 347 } 348 } 349 350 /** 351 * @param thePidSet May be null 352 */ 353 @Override 354 public void setPreviouslyAddedResourcePids(@Nonnull List<JpaPid> thePidSet) { 355 myPidSet = new HashSet<>(thePidSet); 356 } 357 358 @SuppressWarnings("ConstantConditions") 359 @Override 360 public IResultIterator<JpaPid> createQuery( 361 SearchParameterMap theParams, 362 SearchRuntimeDetails theSearchRuntimeDetails, 363 RequestDetails theRequest, 364 @Nonnull RequestPartitionId theRequestPartitionId) { 365 assert theRequestPartitionId != null; 366 assert TransactionSynchronizationManager.isActualTransactionActive(); 367 368 init(theParams, theSearchRuntimeDetails.getSearchUuid(), theRequestPartitionId); 369 370 if (myPidSet == null) { 371 myPidSet = new HashSet<>(); 372 } 373 374 return new QueryIterator(theSearchRuntimeDetails, theRequest); 375 } 376 377 private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) { 378 myCriteriaBuilder = myEntityManager.getCriteriaBuilder(); 379 // we mutate the params. Make a private copy. 380 myParams = theParams.clone(); 381 mySearchUuid = theSearchUuid; 382 myRequestPartitionId = theRequestPartitionId; 383 } 384 385 private List<ISearchQueryExecutor> createQuery( 386 SearchParameterMap theParams, 387 SortSpec sort, 388 Integer theOffset, 389 Integer theMaximumResults, 390 boolean theCountOnlyFlag, 391 RequestDetails theRequest, 392 SearchRuntimeDetails theSearchRuntimeDetails) { 393 394 ArrayList<ISearchQueryExecutor> queries = new ArrayList<>(); 395 396 if (checkUseHibernateSearch()) { 397 // we're going to run at least part of the search against the Fulltext service. 398 399 // Ugh - we have two different return types for now 400 ISearchQueryExecutor fulltextExecutor = null; 401 List<JpaPid> fulltextMatchIds = null; 402 int resultCount = 0; 403 if (myParams.isLastN()) { 404 fulltextMatchIds = executeLastNAgainstIndex(theMaximumResults); 405 resultCount = fulltextMatchIds.size(); 406 } else if (myParams.getEverythingMode() != null) { 407 fulltextMatchIds = queryHibernateSearchForEverythingPids(theRequest); 408 resultCount = fulltextMatchIds.size(); 409 } else { 410 // todo performance MB - some queries must intersect with JPA (e.g. they have a chain, or we haven't 411 // enabled SP indexing). 412 // and some queries don't need JPA. We only need the scroll when we need to intersect with JPA. 413 // It would be faster to have a non-scrolled search in this case, since creating the scroll requires 414 // extra work in Elastic. 415 // if (eligibleToSkipJPAQuery) fulltextExecutor = myFulltextSearchSvc.searchNotScrolled( ... 416 417 // we might need to intersect with JPA. So we might need to traverse ALL results from lucene, not just 418 // a page. 419 fulltextExecutor = myFulltextSearchSvc.searchScrolled(myResourceName, myParams, theRequest); 420 } 421 422 if (fulltextExecutor == null) { 423 fulltextExecutor = 424 SearchQueryExecutors.from(fulltextMatchIds != null ? fulltextMatchIds : new ArrayList<>()); 425 } 426 427 if (theSearchRuntimeDetails != null) { 428 theSearchRuntimeDetails.setFoundIndexMatchesCount(resultCount); 429 HookParams params = new HookParams() 430 .add(RequestDetails.class, theRequest) 431 .addIfMatchesType(ServletRequestDetails.class, theRequest) 432 .add(SearchRuntimeDetails.class, theSearchRuntimeDetails); 433 CompositeInterceptorBroadcaster.doCallHooks( 434 myInterceptorBroadcaster, 435 theRequest, 436 Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, 437 params); 438 } 439 440 // can we skip the database entirely and return the pid list from here? 441 boolean canSkipDatabase = 442 // if we processed an AND clause, and it returned nothing, then nothing can match. 443 !fulltextExecutor.hasNext() 444 || 445 // Our hibernate search query doesn't respect partitions yet 446 (!myPartitionSettings.isPartitioningEnabled() 447 && 448 // were there AND terms left? Then we still need the db. 449 theParams.isEmpty() 450 && 451 // not every param is a param. :-( 452 theParams.getNearDistanceParam() == null 453 && 454 // todo MB don't we support _lastUpdated and _offset now? 455 theParams.getLastUpdated() == null 456 && theParams.getEverythingMode() == null 457 && theParams.getOffset() == null); 458 459 if (canSkipDatabase) { 460 ourLog.trace("Query finished after HSearch. Skip db query phase"); 461 if (theMaximumResults != null) { 462 fulltextExecutor = SearchQueryExecutors.limited(fulltextExecutor, theMaximumResults); 463 } 464 queries.add(fulltextExecutor); 465 } else { 466 ourLog.trace("Query needs db after HSearch. Chunking."); 467 // Finish the query in the database for the rest of the search parameters, sorting, partitioning, etc. 468 // We break the pids into chunks that fit in the 1k limit for jdbc bind params. 469 new QueryChunker<Long>() 470 .chunk( 471 fulltextExecutor, 472 SearchBuilder.getMaximumPageSize(), 473 // for each list of (SearchBuilder.getMaximumPageSize()) 474 // we create a chunked query and add it to 'queries' 475 t -> doCreateChunkedQueries( 476 theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries)); 477 } 478 } else { 479 // do everything in the database. 480 createChunkedQuery( 481 theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null, queries); 482 } 483 484 return queries; 485 } 486 487 /** 488 * Check to see if query should use Hibernate Search, and error if the query can't continue. 489 * 490 * @return true if the query should first be processed by Hibernate Search 491 * @throws InvalidRequestException if fulltext search is not enabled but the query requires it - _content or _text 492 */ 493 private boolean checkUseHibernateSearch() { 494 boolean fulltextEnabled = (myFulltextSearchSvc != null) && !myFulltextSearchSvc.isDisabled(); 495 496 if (!fulltextEnabled) { 497 failIfUsed(Constants.PARAM_TEXT); 498 failIfUsed(Constants.PARAM_CONTENT); 499 } else { 500 for (SortSpec sortSpec : myParams.getAllChainsInOrder()) { 501 final String paramName = sortSpec.getParamName(); 502 if (paramName.contains(".")) { 503 failIfUsedWithChainedSort(Constants.PARAM_TEXT); 504 failIfUsedWithChainedSort(Constants.PARAM_CONTENT); 505 } 506 } 507 } 508 509 // someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we 510 // can. 511 return fulltextEnabled 512 && myParams != null 513 && myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE 514 && myFulltextSearchSvc.canUseHibernateSearch(myResourceName, myParams) 515 && myFulltextSearchSvc.supportsAllSortTerms(myResourceName, myParams); 516 } 517 518 private void failIfUsed(String theParamName) { 519 if (myParams.containsKey(theParamName)) { 520 throw new InvalidRequestException(Msg.code(1192) 521 + "Fulltext search is not enabled on this service, can not process parameter: " + theParamName); 522 } 523 } 524 525 private void failIfUsedWithChainedSort(String theParamName) { 526 if (myParams.containsKey(theParamName)) { 527 throw new InvalidRequestException(Msg.code(2524) 528 + "Fulltext search combined with chained sorts are not supported, can not process parameter: " 529 + theParamName); 530 } 531 } 532 533 private List<JpaPid> executeLastNAgainstIndex(Integer theMaximumResults) { 534 // Can we use our hibernate search generated index on resource to support lastN?: 535 if (myStorageSettings.isAdvancedHSearchIndexing()) { 536 if (myFulltextSearchSvc == null) { 537 throw new InvalidRequestException(Msg.code(2027) 538 + "LastN operation is not enabled on this service, can not process this request"); 539 } 540 List<IResourcePersistentId> persistentIds = myFulltextSearchSvc.lastN(myParams, theMaximumResults); 541 return persistentIds.stream().map(t -> (JpaPid) t).collect(Collectors.toList()); 542 } else { 543 throw new InvalidRequestException( 544 Msg.code(2033) + "LastN operation is not enabled on this service, can not process this request"); 545 } 546 } 547 548 private List<JpaPid> queryHibernateSearchForEverythingPids(RequestDetails theRequestDetails) { 549 JpaPid pid = null; 550 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 551 String idParamValue; 552 IQueryParameterType idParam = 553 myParams.get(IAnyResource.SP_RES_ID).get(0).get(0); 554 if (idParam instanceof TokenParam) { 555 TokenParam idParm = (TokenParam) idParam; 556 idParamValue = idParm.getValue(); 557 } else { 558 StringParam idParm = (StringParam) idParam; 559 idParamValue = idParm.getValue(); 560 } 561 562 pid = myIdHelperService 563 .resolveResourceIdentity( 564 myRequestPartitionId, 565 myResourceName, 566 idParamValue, 567 ResolveIdentityMode.includeDeleted().cacheOk()) 568 .getPersistentId(); 569 } 570 return myFulltextSearchSvc.everything(myResourceName, myParams, pid, theRequestDetails); 571 } 572 573 private void doCreateChunkedQueries( 574 SearchParameterMap theParams, 575 List<Long> thePids, 576 Integer theOffset, 577 SortSpec sort, 578 boolean theCount, 579 RequestDetails theRequest, 580 ArrayList<ISearchQueryExecutor> theQueries) { 581 582 if (thePids.size() < getMaximumPageSize()) { 583 thePids = normalizeIdListForInClause(thePids); 584 } 585 createChunkedQuery(theParams, sort, theOffset, thePids.size(), theCount, theRequest, thePids, theQueries); 586 } 587 588 /** 589 * Combs through the params for any _id parameters and extracts the PIDs for them 590 */ 591 private void extractTargetPidsFromIdParams(Set<Long> theTargetPids) { 592 // get all the IQueryParameterType objects 593 // for _id -> these should all be StringParam values 594 HashSet<IIdType> ids = new HashSet<>(); 595 List<List<IQueryParameterType>> params = myParams.get(IAnyResource.SP_RES_ID); 596 for (List<IQueryParameterType> paramList : params) { 597 for (IQueryParameterType param : paramList) { 598 String id; 599 if (param instanceof StringParam) { 600 // we expect all _id values to be StringParams 601 id = ((StringParam) param).getValue(); 602 } else if (param instanceof TokenParam) { 603 id = ((TokenParam) param).getValue(); 604 } else { 605 // we do not expect the _id parameter to be a non-string value 606 throw new IllegalArgumentException( 607 Msg.code(1193) + "_id parameter must be a StringParam or TokenParam"); 608 } 609 610 IIdType idType = myContext.getVersion().newIdType(); 611 if (id.contains("/")) { 612 idType.setValue(id); 613 } else { 614 idType.setValue(myResourceName + "/" + id); 615 } 616 ids.add(idType); 617 } 618 } 619 620 // fetch our target Pids 621 // this will throw if an id is not found 622 Map<IIdType, IResourceLookup<JpaPid>> idToIdentity = myIdHelperService.resolveResourceIdentities( 623 myRequestPartitionId, 624 new ArrayList<>(ids), 625 ResolveIdentityMode.failOnDeleted().noCacheUnlessDeletesDisabled()); 626 627 // add the pids to targetPids 628 for (IResourceLookup pid : idToIdentity.values()) { 629 theTargetPids.add((Long) pid.getPersistentId().getId()); 630 } 631 } 632 633 private void createChunkedQuery( 634 SearchParameterMap theParams, 635 SortSpec sort, 636 Integer theOffset, 637 Integer theMaximumResults, 638 boolean theCountOnlyFlag, 639 RequestDetails theRequest, 640 List<Long> thePidList, 641 List<ISearchQueryExecutor> theSearchQueryExecutors) { 642 if (myParams.getEverythingMode() != null) { 643 createChunkedQueryForEverythingSearch( 644 theParams, theOffset, theMaximumResults, theCountOnlyFlag, thePidList, theSearchQueryExecutors); 645 } else { 646 createChunkedQueryNormalSearch( 647 theParams, sort, theOffset, theCountOnlyFlag, theRequest, thePidList, theSearchQueryExecutors); 648 } 649 } 650 651 private void createChunkedQueryNormalSearch( 652 SearchParameterMap theParams, 653 SortSpec sort, 654 Integer theOffset, 655 boolean theCountOnlyFlag, 656 RequestDetails theRequest, 657 List<Long> thePidList, 658 List<ISearchQueryExecutor> theSearchQueryExecutors) { 659 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 660 myContext, 661 myStorageSettings, 662 myPartitionSettings, 663 myRequestPartitionId, 664 myResourceName, 665 mySqlBuilderFactory, 666 myDialectProvider, 667 theCountOnlyFlag); 668 QueryStack queryStack3 = new QueryStack( 669 theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); 670 671 if (theParams.keySet().size() > 1 672 || theParams.getSort() != null 673 || theParams.keySet().contains(Constants.PARAM_HAS) 674 || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) { 675 List<RuntimeSearchParam> activeComboParams = mySearchParamRegistry.getActiveComboSearchParams( 676 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 677 if (activeComboParams.isEmpty()) { 678 sqlBuilder.setNeedResourceTableRoot(true); 679 } 680 } 681 682 /* 683 * If we're doing a filter, always use the resource table as the root - This avoids the possibility of 684 * specific filters with ORs as their root from working around the natural resource type / deletion 685 * status / partition IDs built into queries. 686 */ 687 if (theParams.containsKey(Constants.PARAM_FILTER)) { 688 Condition partitionIdPredicate = sqlBuilder 689 .getOrCreateResourceTablePredicateBuilder() 690 .createPartitionIdPredicate(myRequestPartitionId); 691 if (partitionIdPredicate != null) { 692 sqlBuilder.addPredicate(partitionIdPredicate); 693 } 694 } 695 696 // Normal search 697 searchForIdsWithAndOr(sqlBuilder, queryStack3, myParams, theRequest); 698 699 // If we haven't added any predicates yet, we're doing a search for all resources. Make sure we add the 700 // partition ID predicate in that case. 701 if (!sqlBuilder.haveAtLeastOnePredicate()) { 702 Condition partitionIdPredicate = sqlBuilder 703 .getOrCreateResourceTablePredicateBuilder() 704 .createPartitionIdPredicate(myRequestPartitionId); 705 if (partitionIdPredicate != null) { 706 sqlBuilder.addPredicate(partitionIdPredicate); 707 } 708 } 709 710 // Add PID list predicate for full text search and/or lastn operation 711 addPidListPredicate(thePidList, sqlBuilder); 712 713 // Last updated 714 addLastUpdatePredicate(sqlBuilder); 715 716 /* 717 * Exclude the pids already in the previous iterator. This is an optimization, as opposed 718 * to something needed to guarantee correct results. 719 * 720 * Why do we need it? Suppose for example, a query like: 721 * Observation?category=foo,bar,baz 722 * And suppose you have many resources that have all 3 of these category codes. In this case 723 * the SQL query will probably return the same PIDs multiple times, and if this happens enough 724 * we may exhaust the query results without getting enough distinct results back. When that 725 * happens we re-run the query with a larger limit. Excluding results we already know about 726 * tries to ensure that we get new unique results. 727 * 728 * The challenge with that though is that lots of DBs have an issue with too many 729 * parameters in one query. So we only do this optimization if there aren't too 730 * many results. 731 */ 732 if (myHasNextIteratorQuery) { 733 if (myPidSet.size() + sqlBuilder.countBindVariables() < 900) { 734 sqlBuilder.excludeResourceIdsPredicate(myPidSet); 735 } 736 } 737 738 /* 739 * If offset is present, we want deduplicate the results by using GROUP BY 740 */ 741 if (theOffset != null) { 742 queryStack3.addGrouping(); 743 queryStack3.setUseAggregate(true); 744 } 745 746 /* 747 * Sort 748 * 749 * If we have a sort, we wrap the criteria search (the search that actually 750 * finds the appropriate resources) in an outer search which is then sorted 751 */ 752 if (sort != null) { 753 assert !theCountOnlyFlag; 754 755 createSort(queryStack3, sort, theParams); 756 } 757 758 /* 759 * Now perform the search 760 */ 761 executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder); 762 } 763 764 private void executeSearch( 765 Integer theOffset, List<ISearchQueryExecutor> theSearchQueryExecutors, SearchQueryBuilder sqlBuilder) { 766 GeneratedSql generatedSql = sqlBuilder.generate(theOffset, myMaxResultsToFetch); 767 if (!generatedSql.isMatchNothing()) { 768 SearchQueryExecutor executor = 769 mySqlBuilderFactory.newSearchQueryExecutor(generatedSql, myMaxResultsToFetch); 770 theSearchQueryExecutors.add(executor); 771 } 772 } 773 774 private void createChunkedQueryForEverythingSearch( 775 SearchParameterMap theParams, 776 Integer theOffset, 777 Integer theMaximumResults, 778 boolean theCountOnlyFlag, 779 List<Long> thePidList, 780 List<ISearchQueryExecutor> theSearchQueryExecutors) { 781 782 SearchQueryBuilder sqlBuilder = new SearchQueryBuilder( 783 myContext, 784 myStorageSettings, 785 myPartitionSettings, 786 myRequestPartitionId, 787 null, 788 mySqlBuilderFactory, 789 myDialectProvider, 790 theCountOnlyFlag); 791 792 QueryStack queryStack3 = new QueryStack( 793 theParams, myStorageSettings, myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings); 794 795 JdbcTemplate jdbcTemplate = initializeJdbcTemplate(theMaximumResults); 796 797 Set<Long> targetPids = new HashSet<>(); 798 if (myParams.get(IAnyResource.SP_RES_ID) != null) { 799 800 extractTargetPidsFromIdParams(targetPids); 801 802 // add the target pids to our executors as the first 803 // results iterator to go through 804 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(new ArrayList<>(targetPids))); 805 } else { 806 // For Everything queries, we make the query root by the ResourceLink table, since this query 807 // is basically a reverse-include search. For type/Everything (as opposed to instance/Everything) 808 // the one problem with this approach is that it doesn't catch Patients that have absolutely 809 // nothing linked to them. So we do one additional query to make sure we catch those too. 810 SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder( 811 myContext, 812 myStorageSettings, 813 myPartitionSettings, 814 myRequestPartitionId, 815 myResourceName, 816 mySqlBuilderFactory, 817 myDialectProvider, 818 theCountOnlyFlag); 819 GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch); 820 String sql = allTargetsSql.getSql(); 821 Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]); 822 823 List<Long> output = jdbcTemplate.query(sql, args, new RowMapper<Long>() { 824 @Override 825 public Long mapRow(ResultSet rs, int rowNum) throws SQLException { 826 if (myPartitionSettings.isPartitioningEnabled()) { 827 return rs.getLong(2); 828 } else { 829 return rs.getLong(1); 830 } 831 } 832 }); 833 834 // we add a search executor to fetch unlinked patients first 835 theSearchQueryExecutors.add(new ResolvedSearchQueryExecutor(output)); 836 } 837 838 List<String> typeSourceResources = new ArrayList<>(); 839 if (myParams.get(Constants.PARAM_TYPE) != null) { 840 typeSourceResources.addAll(extractTypeSourceResourcesFromParams()); 841 } 842 843 queryStack3.addPredicateEverythingOperation( 844 myResourceName, typeSourceResources, targetPids.toArray(new Long[0])); 845 846 // Add PID list predicate for full text search and/or lastn operation 847 addPidListPredicate(thePidList, sqlBuilder); 848 849 /* 850 * If offset is present, we want deduplicate the results by using GROUP BY 851 * ORDER BY is required to make sure we return unique results for each page 852 */ 853 if (theOffset != null) { 854 queryStack3.addGrouping(); 855 queryStack3.addOrdering(); 856 queryStack3.setUseAggregate(true); 857 } 858 859 /* 860 * Now perform the search 861 */ 862 executeSearch(theOffset, theSearchQueryExecutors, sqlBuilder); 863 } 864 865 private void addPidListPredicate(List<Long> thePidList, SearchQueryBuilder theSqlBuilder) { 866 if (thePidList != null && !thePidList.isEmpty()) { 867 theSqlBuilder.addResourceIdsPredicate(thePidList); 868 } 869 } 870 871 private void addLastUpdatePredicate(SearchQueryBuilder theSqlBuilder) { 872 DateRangeParam lu = myParams.getLastUpdated(); 873 if (lu != null && !lu.isEmpty()) { 874 Condition lastUpdatedPredicates = theSqlBuilder.addPredicateLastUpdated(lu); 875 theSqlBuilder.addPredicate(lastUpdatedPredicates); 876 } 877 } 878 879 private JdbcTemplate initializeJdbcTemplate(Integer theMaximumResults) { 880 JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource()); 881 jdbcTemplate.setFetchSize(myFetchSize); 882 if (theMaximumResults != null) { 883 jdbcTemplate.setMaxRows(theMaximumResults); 884 } 885 return jdbcTemplate; 886 } 887 888 private Collection<String> extractTypeSourceResourcesFromParams() { 889 890 List<List<IQueryParameterType>> listOfList = myParams.get(Constants.PARAM_TYPE); 891 892 // first off, let's flatten the list of list 893 List<IQueryParameterType> iQueryParameterTypesList = 894 listOfList.stream().flatMap(List::stream).collect(Collectors.toList()); 895 896 // then, extract all elements of each CSV into one big list 897 List<String> resourceTypes = iQueryParameterTypesList.stream() 898 .map(param -> ((StringParam) param).getValue()) 899 .map(csvString -> List.of(csvString.split(","))) 900 .flatMap(List::stream) 901 .collect(Collectors.toList()); 902 903 Set<String> knownResourceTypes = myContext.getResourceTypes(); 904 905 // remove leading/trailing whitespaces if any and remove duplicates 906 Set<String> retVal = new HashSet<>(); 907 908 for (String type : resourceTypes) { 909 String trimmed = type.trim(); 910 if (!knownResourceTypes.contains(trimmed)) { 911 throw new ResourceNotFoundException( 912 Msg.code(2197) + "Unknown resource type '" + trimmed + "' in _type parameter."); 913 } 914 retVal.add(trimmed); 915 } 916 917 return retVal; 918 } 919 920 private boolean isPotentiallyContainedReferenceParameterExistsAtRoot(SearchParameterMap theParams) { 921 return myStorageSettings.isIndexOnContainedResources() 922 && theParams.values().stream() 923 .flatMap(Collection::stream) 924 .flatMap(Collection::stream) 925 .anyMatch(ReferenceParam.class::isInstance); 926 } 927 928 private void createSort(QueryStack theQueryStack, SortSpec theSort, SearchParameterMap theParams) { 929 if (theSort == null || isBlank(theSort.getParamName())) { 930 return; 931 } 932 933 boolean ascending = (theSort.getOrder() == null) || (theSort.getOrder() == SortOrderEnum.ASC); 934 935 if (IAnyResource.SP_RES_ID.equals(theSort.getParamName())) { 936 937 theQueryStack.addSortOnResourceId(ascending); 938 939 } else if (Constants.PARAM_PID.equals(theSort.getParamName())) { 940 941 theQueryStack.addSortOnResourcePID(ascending); 942 943 } else if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) { 944 945 theQueryStack.addSortOnLastUpdated(ascending); 946 947 } else { 948 RuntimeSearchParam param = mySearchParamRegistry.getActiveSearchParam( 949 myResourceName, theSort.getParamName(), ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 950 951 /* 952 * If we have a sort like _sort=subject.name and we have an 953 * uplifted refchain for that combination we can do it more efficiently 954 * by using the index associated with the uplifted refchain. In this case, 955 * we need to find the actual target search parameter (corresponding 956 * to "name" in this example) so that we know what datatype it is. 957 */ 958 String paramName = theSort.getParamName(); 959 if (param == null && myStorageSettings.isIndexOnUpliftedRefchains()) { 960 String[] chains = StringUtils.split(paramName, '.'); 961 if (chains.length == 2) { 962 963 // Given: Encounter?_sort=Patient:subject.name 964 String referenceParam = chains[0]; // subject 965 String referenceParamTargetType = null; // Patient 966 String targetParam = chains[1]; // name 967 968 int colonIdx = referenceParam.indexOf(':'); 969 if (colonIdx > -1) { 970 referenceParamTargetType = referenceParam.substring(0, colonIdx); 971 referenceParam = referenceParam.substring(colonIdx + 1); 972 } 973 RuntimeSearchParam outerParam = mySearchParamRegistry.getActiveSearchParam( 974 myResourceName, referenceParam, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 975 if (outerParam == null) { 976 throwInvalidRequestExceptionForUnknownSortParameter(myResourceName, referenceParam); 977 } else if (outerParam.hasUpliftRefchain(targetParam)) { 978 for (String nextTargetType : outerParam.getTargets()) { 979 if (referenceParamTargetType != null && !referenceParamTargetType.equals(nextTargetType)) { 980 continue; 981 } 982 RuntimeSearchParam innerParam = mySearchParamRegistry.getActiveSearchParam( 983 nextTargetType, 984 targetParam, 985 ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 986 if (innerParam != null) { 987 param = innerParam; 988 break; 989 } 990 } 991 } 992 } 993 } 994 995 int colonIdx = paramName.indexOf(':'); 996 String referenceTargetType = null; 997 if (colonIdx > -1) { 998 referenceTargetType = paramName.substring(0, colonIdx); 999 paramName = paramName.substring(colonIdx + 1); 1000 } 1001 1002 int dotIdx = paramName.indexOf('.'); 1003 String chainName = null; 1004 if (param == null && dotIdx > -1) { 1005 chainName = paramName.substring(dotIdx + 1); 1006 paramName = paramName.substring(0, dotIdx); 1007 if (chainName.contains(".")) { 1008 String msg = myContext 1009 .getLocalizer() 1010 .getMessageSanitized( 1011 BaseStorageDao.class, 1012 "invalidSortParameterTooManyChains", 1013 paramName + "." + chainName); 1014 throw new InvalidRequestException(Msg.code(2286) + msg); 1015 } 1016 } 1017 1018 if (param == null) { 1019 param = mySearchParamRegistry.getActiveSearchParam( 1020 myResourceName, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1021 } 1022 1023 if (param == null) { 1024 throwInvalidRequestExceptionForUnknownSortParameter(getResourceName(), paramName); 1025 } 1026 1027 // param will never be null here (the above line throws if it does) 1028 // this is just to prevent the warning 1029 assert param != null; 1030 if (isNotBlank(chainName) && param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) { 1031 throw new InvalidRequestException( 1032 Msg.code(2285) + "Invalid chain, " + paramName + " is not a reference SearchParameter"); 1033 } 1034 1035 switch (param.getParamType()) { 1036 case STRING: 1037 theQueryStack.addSortOnString(myResourceName, paramName, ascending); 1038 break; 1039 case DATE: 1040 theQueryStack.addSortOnDate(myResourceName, paramName, ascending); 1041 break; 1042 case REFERENCE: 1043 theQueryStack.addSortOnResourceLink( 1044 myResourceName, referenceTargetType, paramName, chainName, ascending, theParams); 1045 break; 1046 case TOKEN: 1047 theQueryStack.addSortOnToken(myResourceName, paramName, ascending); 1048 break; 1049 case NUMBER: 1050 theQueryStack.addSortOnNumber(myResourceName, paramName, ascending); 1051 break; 1052 case URI: 1053 theQueryStack.addSortOnUri(myResourceName, paramName, ascending); 1054 break; 1055 case QUANTITY: 1056 theQueryStack.addSortOnQuantity(myResourceName, paramName, ascending); 1057 break; 1058 case COMPOSITE: 1059 List<RuntimeSearchParam> compositeList = 1060 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, param); 1061 if (compositeList == null) { 1062 throw new InvalidRequestException(Msg.code(1195) + "The composite _sort parameter " + paramName 1063 + " is not defined by the resource " + myResourceName); 1064 } 1065 if (compositeList.size() != 2) { 1066 throw new InvalidRequestException(Msg.code(1196) + "The composite _sort parameter " + paramName 1067 + " must have 2 composite types declared in parameter annotation, found " 1068 + compositeList.size()); 1069 } 1070 RuntimeSearchParam left = compositeList.get(0); 1071 RuntimeSearchParam right = compositeList.get(1); 1072 1073 createCompositeSort(theQueryStack, left.getParamType(), left.getName(), ascending); 1074 createCompositeSort(theQueryStack, right.getParamType(), right.getName(), ascending); 1075 1076 break; 1077 case SPECIAL: 1078 if (LOCATION_POSITION.equals(param.getPath())) { 1079 theQueryStack.addSortOnCoordsNear(paramName, ascending, theParams); 1080 break; 1081 } 1082 throw new InvalidRequestException( 1083 Msg.code(2306) + "This server does not support _sort specifications of type " 1084 + param.getParamType() + " - Can't serve _sort=" + paramName); 1085 1086 case HAS: 1087 default: 1088 throw new InvalidRequestException( 1089 Msg.code(1197) + "This server does not support _sort specifications of type " 1090 + param.getParamType() + " - Can't serve _sort=" + paramName); 1091 } 1092 } 1093 1094 // Recurse 1095 createSort(theQueryStack, theSort.getChain(), theParams); 1096 } 1097 1098 private void throwInvalidRequestExceptionForUnknownSortParameter(String theResourceName, String theParamName) { 1099 Collection<String> validSearchParameterNames = mySearchParamRegistry.getValidSearchParameterNamesIncludingMeta( 1100 theResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SORT); 1101 String msg = myContext 1102 .getLocalizer() 1103 .getMessageSanitized( 1104 BaseStorageDao.class, 1105 "invalidSortParameter", 1106 theParamName, 1107 theResourceName, 1108 validSearchParameterNames); 1109 throw new InvalidRequestException(Msg.code(1194) + msg); 1110 } 1111 1112 private void createCompositeSort( 1113 QueryStack theQueryStack, 1114 RestSearchParameterTypeEnum theParamType, 1115 String theParamName, 1116 boolean theAscending) { 1117 1118 switch (theParamType) { 1119 case STRING: 1120 theQueryStack.addSortOnString(myResourceName, theParamName, theAscending); 1121 break; 1122 case DATE: 1123 theQueryStack.addSortOnDate(myResourceName, theParamName, theAscending); 1124 break; 1125 case TOKEN: 1126 theQueryStack.addSortOnToken(myResourceName, theParamName, theAscending); 1127 break; 1128 case QUANTITY: 1129 theQueryStack.addSortOnQuantity(myResourceName, theParamName, theAscending); 1130 break; 1131 case NUMBER: 1132 case REFERENCE: 1133 case COMPOSITE: 1134 case URI: 1135 case HAS: 1136 case SPECIAL: 1137 default: 1138 throw new InvalidRequestException( 1139 Msg.code(1198) + "Don't know how to handle composite parameter with type of " + theParamType 1140 + " on _sort=" + theParamName); 1141 } 1142 } 1143 1144 private void doLoadPids( 1145 Collection<JpaPid> thePids, 1146 Collection<JpaPid> theIncludedPids, 1147 List<IBaseResource> theResourceListToPopulate, 1148 boolean theForHistoryOperation, 1149 Map<JpaPid, Integer> thePosition) { 1150 1151 Map<Long, Long> resourcePidToVersion = null; 1152 for (JpaPid next : thePids) { 1153 if (next.getVersion() != null && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1154 if (resourcePidToVersion == null) { 1155 resourcePidToVersion = new HashMap<>(); 1156 } 1157 resourcePidToVersion.put((next).getId(), next.getVersion()); 1158 } 1159 } 1160 1161 List<Long> versionlessPids = JpaPid.toLongList(thePids); 1162 if (versionlessPids.size() < getMaximumPageSize()) { 1163 versionlessPids = normalizeIdListForInClause(versionlessPids); 1164 } 1165 1166 // -- get the resource from the searchView 1167 Collection<ResourceSearchView> resourceSearchViewList = 1168 myResourceSearchViewDao.findByResourceIds(versionlessPids); 1169 1170 // -- preload all tags with tag definition if any 1171 Map<Long, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList); 1172 1173 for (IBaseResourceEntity next : resourceSearchViewList) { 1174 if (next.getDeleted() != null) { 1175 continue; 1176 } 1177 1178 Class<? extends IBaseResource> resourceType = 1179 myContext.getResourceDefinition(next.getResourceType()).getImplementingClass(); 1180 1181 JpaPid resourceId = JpaPid.fromId(next.getResourceId()); 1182 1183 /* 1184 * If a specific version is requested via an include, we'll replace the current version 1185 * with the specific desired version. This is not the most efficient thing, given that 1186 * we're loading the current version and then turning around and throwing it away again. 1187 * This could be optimized and probably should be, but it's not critical given that 1188 * this only applies to includes, which don't tend to be massive in numbers. 1189 */ 1190 if (resourcePidToVersion != null) { 1191 Long version = resourcePidToVersion.get(next.getResourceId()); 1192 resourceId.setVersion(version); 1193 if (version != null && !version.equals(next.getVersion())) { 1194 IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(resourceType); 1195 next = (IBaseResourceEntity) 1196 dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null); 1197 } 1198 } 1199 1200 IBaseResource resource = null; 1201 if (next != null) { 1202 resource = myJpaStorageResourceParser.toResource( 1203 resourceType, next, tagMap.get(next.getId()), theForHistoryOperation); 1204 } 1205 if (resource == null) { 1206 if (next != null) { 1207 ourLog.warn( 1208 "Unable to find resource {}/{}/_history/{} in database", 1209 next.getResourceType(), 1210 next.getIdDt().getIdPart(), 1211 next.getVersion()); 1212 } else { 1213 ourLog.warn("Unable to find resource in database."); 1214 } 1215 continue; 1216 } 1217 1218 Integer index = thePosition.get(resourceId); 1219 if (index == null) { 1220 ourLog.warn("Got back unexpected resource PID {}", resourceId); 1221 continue; 1222 } 1223 1224 if (theIncludedPids.contains(resourceId)) { 1225 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.INCLUDE); 1226 } else { 1227 ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put(resource, BundleEntrySearchModeEnum.MATCH); 1228 } 1229 1230 theResourceListToPopulate.set(index, resource); 1231 } 1232 } 1233 1234 private Map<Long, Collection<ResourceTag>> getResourceTagMap( 1235 Collection<? extends IBaseResourceEntity> theResourceSearchViewList) { 1236 1237 List<Long> idList = new ArrayList<>(theResourceSearchViewList.size()); 1238 1239 // -- find all resource has tags 1240 for (IBaseResourceEntity resource : theResourceSearchViewList) { 1241 if (resource.isHasTags()) idList.add(resource.getId()); 1242 } 1243 1244 return getPidToTagMap(idList); 1245 } 1246 1247 @Nonnull 1248 private Map<Long, Collection<ResourceTag>> getPidToTagMap(List<Long> thePidList) { 1249 Map<Long, Collection<ResourceTag>> tagMap = new HashMap<>(); 1250 1251 // -- no tags 1252 if (thePidList.isEmpty()) return tagMap; 1253 1254 // -- get all tags for the idList 1255 Collection<ResourceTag> tagList = myResourceTagDao.findByResourceIds(thePidList); 1256 1257 // -- build the map, key = resourceId, value = list of ResourceTag 1258 JpaPid resourceId; 1259 Collection<ResourceTag> tagCol; 1260 for (ResourceTag tag : tagList) { 1261 1262 resourceId = JpaPid.fromId(tag.getResourceId()); 1263 tagCol = tagMap.get(resourceId.getId()); 1264 if (tagCol == null) { 1265 tagCol = new ArrayList<>(); 1266 tagCol.add(tag); 1267 tagMap.put(resourceId.getId(), tagCol); 1268 } else { 1269 tagCol.add(tag); 1270 } 1271 } 1272 1273 return tagMap; 1274 } 1275 1276 @Override 1277 public void loadResourcesByPid( 1278 Collection<JpaPid> thePids, 1279 Collection<JpaPid> theIncludedPids, 1280 List<IBaseResource> theResourceListToPopulate, 1281 boolean theForHistoryOperation, 1282 RequestDetails theDetails) { 1283 if (thePids.isEmpty()) { 1284 ourLog.debug("The include pids are empty"); 1285 } 1286 1287 // Dupes will cause a crash later anyhow, but this is expensive so only do it 1288 // when running asserts 1289 assert new HashSet<>(thePids).size() == thePids.size() : "PID list contains duplicates: " + thePids; 1290 1291 Map<JpaPid, Integer> position = new HashMap<>(); 1292 for (JpaPid next : thePids) { 1293 position.put(next, theResourceListToPopulate.size()); 1294 theResourceListToPopulate.add(null); 1295 } 1296 1297 // Can we fast track this loading by checking elastic search? 1298 if (isLoadingFromElasticSearchSupported(thePids)) { 1299 try { 1300 theResourceListToPopulate.addAll(loadResourcesFromElasticSearch(thePids)); 1301 return; 1302 1303 } catch (ResourceNotFoundInIndexException theE) { 1304 // some resources were not found in index, so we will inform this and resort to JPA search 1305 ourLog.warn( 1306 "Some resources were not found in index. Make sure all resources were indexed. Resorting to database search."); 1307 } 1308 } 1309 1310 // We only chunk because some jdbc drivers can't handle long param lists. 1311 new QueryChunker<JpaPid>() 1312 .chunk( 1313 thePids, 1314 t -> doLoadPids( 1315 t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position)); 1316 } 1317 1318 /** 1319 * Check if we can load the resources from Hibernate Search instead of the database. 1320 * We assume this is faster. 1321 * <p> 1322 * Hibernate Search only stores the current version, and only if enabled. 1323 * 1324 * @param thePids the pids to check for versioned references 1325 * @return can we fetch from Hibernate Search? 1326 */ 1327 private boolean isLoadingFromElasticSearchSupported(Collection<JpaPid> thePids) { 1328 // is storage enabled? 1329 return myStorageSettings.isStoreResourceInHSearchIndex() 1330 && myStorageSettings.isAdvancedHSearchIndexing() 1331 && 1332 // we don't support history 1333 thePids.stream().noneMatch(p -> p.getVersion() != null) 1334 && 1335 // skip the complexity for metadata in dstu2 1336 myContext.getVersion().getVersion().isEqualOrNewerThan(FhirVersionEnum.DSTU3); 1337 } 1338 1339 private List<IBaseResource> loadResourcesFromElasticSearch(Collection<JpaPid> thePids) { 1340 // Do we use the fulltextsvc via hibernate-search to load resources or be backwards compatible with older ES 1341 // only impl 1342 // to handle lastN? 1343 if (myStorageSettings.isAdvancedHSearchIndexing() && myStorageSettings.isStoreResourceInHSearchIndex()) { 1344 List<Long> pidList = thePids.stream().map(JpaPid::getId).collect(Collectors.toList()); 1345 1346 return myFulltextSearchSvc.getResources(pidList); 1347 } else if (!Objects.isNull(myParams) && myParams.isLastN()) { 1348 // legacy LastN implementation 1349 return myIElasticsearchSvc.getObservationResources(thePids); 1350 } else { 1351 return Collections.emptyList(); 1352 } 1353 } 1354 1355 /** 1356 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later 1357 * so it can't be Collections.emptySet() or some such thing. 1358 * The JpaPid returned will have resource type populated. 1359 */ 1360 @Override 1361 public Set<JpaPid> loadIncludes( 1362 FhirContext theContext, 1363 EntityManager theEntityManager, 1364 Collection<JpaPid> theMatches, 1365 Collection<Include> theIncludes, 1366 boolean theReverseMode, 1367 DateRangeParam theLastUpdated, 1368 String theSearchIdOrDescription, 1369 RequestDetails theRequest, 1370 Integer theMaxCount) { 1371 SearchBuilderLoadIncludesParameters<JpaPid> parameters = new SearchBuilderLoadIncludesParameters<>(); 1372 parameters.setFhirContext(theContext); 1373 parameters.setEntityManager(theEntityManager); 1374 parameters.setMatches(theMatches); 1375 parameters.setIncludeFilters(theIncludes); 1376 parameters.setReverseMode(theReverseMode); 1377 parameters.setLastUpdated(theLastUpdated); 1378 parameters.setSearchIdOrDescription(theSearchIdOrDescription); 1379 parameters.setRequestDetails(theRequest); 1380 parameters.setMaxCount(theMaxCount); 1381 return loadIncludes(parameters); 1382 } 1383 1384 @Override 1385 public Set<JpaPid> loadIncludes(SearchBuilderLoadIncludesParameters<JpaPid> theParameters) { 1386 Collection<JpaPid> matches = theParameters.getMatches(); 1387 Collection<Include> currentIncludes = theParameters.getIncludeFilters(); 1388 boolean reverseMode = theParameters.isReverseMode(); 1389 EntityManager entityManager = theParameters.getEntityManager(); 1390 Integer maxCount = theParameters.getMaxCount(); 1391 FhirContext fhirContext = theParameters.getFhirContext(); 1392 RequestDetails request = theParameters.getRequestDetails(); 1393 String searchIdOrDescription = theParameters.getSearchIdOrDescription(); 1394 List<String> desiredResourceTypes = theParameters.getDesiredResourceTypes(); 1395 boolean hasDesiredResourceTypes = desiredResourceTypes != null && !desiredResourceTypes.isEmpty(); 1396 if (CompositeInterceptorBroadcaster.hasHooks( 1397 Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, theParameters.getRequestDetails())) { 1398 CurrentThreadCaptureQueriesListener.startCapturing(); 1399 } 1400 if (matches.isEmpty()) { 1401 return new HashSet<>(); 1402 } 1403 if (currentIncludes == null || currentIncludes.isEmpty()) { 1404 return new HashSet<>(); 1405 } 1406 String searchPidFieldName = reverseMode ? MY_TARGET_RESOURCE_PID : MY_SOURCE_RESOURCE_PID; 1407 String findPidFieldName = reverseMode ? MY_SOURCE_RESOURCE_PID : MY_TARGET_RESOURCE_PID; 1408 String findResourceTypeFieldName = reverseMode ? MY_SOURCE_RESOURCE_TYPE : MY_TARGET_RESOURCE_TYPE; 1409 String findVersionFieldName = null; 1410 if (!reverseMode && myStorageSettings.isRespectVersionsForSearchIncludes()) { 1411 findVersionFieldName = MY_TARGET_RESOURCE_VERSION; 1412 } 1413 1414 List<JpaPid> nextRoundMatches = new ArrayList<>(matches); 1415 HashSet<JpaPid> allAdded = new HashSet<>(); 1416 HashSet<JpaPid> original = new HashSet<>(matches); 1417 ArrayList<Include> includes = new ArrayList<>(currentIncludes); 1418 1419 int roundCounts = 0; 1420 StopWatch w = new StopWatch(); 1421 1422 boolean addedSomeThisRound; 1423 do { 1424 roundCounts++; 1425 1426 HashSet<JpaPid> pidsToInclude = new HashSet<>(); 1427 1428 for (Iterator<Include> iter = includes.iterator(); iter.hasNext(); ) { 1429 Include nextInclude = iter.next(); 1430 if (!nextInclude.isRecurse()) { 1431 iter.remove(); 1432 } 1433 1434 // Account for _include=* 1435 boolean matchAll = "*".equals(nextInclude.getValue()); 1436 1437 // Account for _include=[resourceType]:* 1438 String wantResourceType = null; 1439 if (!matchAll) { 1440 if ("*".equals(nextInclude.getParamName())) { 1441 wantResourceType = nextInclude.getParamType(); 1442 matchAll = true; 1443 } 1444 } 1445 1446 if (matchAll) { 1447 loadIncludesMatchAll( 1448 findPidFieldName, 1449 findResourceTypeFieldName, 1450 findVersionFieldName, 1451 searchPidFieldName, 1452 wantResourceType, 1453 reverseMode, 1454 hasDesiredResourceTypes, 1455 nextRoundMatches, 1456 entityManager, 1457 maxCount, 1458 desiredResourceTypes, 1459 pidsToInclude, 1460 request); 1461 } else { 1462 loadIncludesMatchSpecific( 1463 nextInclude, 1464 fhirContext, 1465 findPidFieldName, 1466 findVersionFieldName, 1467 searchPidFieldName, 1468 reverseMode, 1469 nextRoundMatches, 1470 entityManager, 1471 maxCount, 1472 pidsToInclude); 1473 } 1474 } 1475 1476 nextRoundMatches.clear(); 1477 for (JpaPid next : pidsToInclude) { 1478 if (!original.contains(next) && !allAdded.contains(next)) { 1479 nextRoundMatches.add(next); 1480 } 1481 } 1482 1483 addedSomeThisRound = allAdded.addAll(pidsToInclude); 1484 1485 if (maxCount != null && allAdded.size() >= maxCount) { 1486 break; 1487 } 1488 1489 } while (!includes.isEmpty() && !nextRoundMatches.isEmpty() && addedSomeThisRound); 1490 1491 allAdded.removeAll(original); 1492 1493 ourLog.info( 1494 "Loaded {} {} in {} rounds and {} ms for search {}", 1495 allAdded.size(), 1496 reverseMode ? "_revincludes" : "_includes", 1497 roundCounts, 1498 w.getMillisAndRestart(), 1499 searchIdOrDescription); 1500 1501 if (CompositeInterceptorBroadcaster.hasHooks( 1502 Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, request)) { 1503 callRawSqlHookWithCurrentThreadQueries(request); 1504 } 1505 // Interceptor call: STORAGE_PREACCESS_RESOURCES 1506 // This can be used to remove results from the search result details before 1507 // the user has a chance to know that they were in the results 1508 if (!allAdded.isEmpty()) { 1509 1510 if (CompositeInterceptorBroadcaster.hasHooks( 1511 Pointcut.STORAGE_PREACCESS_RESOURCES, myInterceptorBroadcaster, request)) { 1512 List<JpaPid> includedPidList = new ArrayList<>(allAdded); 1513 JpaPreResourceAccessDetails accessDetails = 1514 new JpaPreResourceAccessDetails(includedPidList, () -> this); 1515 HookParams params = new HookParams() 1516 .add(IPreResourceAccessDetails.class, accessDetails) 1517 .add(RequestDetails.class, request) 1518 .addIfMatchesType(ServletRequestDetails.class, request); 1519 CompositeInterceptorBroadcaster.doCallHooks( 1520 myInterceptorBroadcaster, request, Pointcut.STORAGE_PREACCESS_RESOURCES, params); 1521 1522 for (int i = includedPidList.size() - 1; i >= 0; i--) { 1523 if (accessDetails.isDontReturnResourceAtIndex(i)) { 1524 JpaPid value = includedPidList.remove(i); 1525 if (value != null) { 1526 allAdded.remove(value); 1527 } 1528 } 1529 } 1530 } 1531 } 1532 1533 return allAdded; 1534 } 1535 1536 private void loadIncludesMatchSpecific( 1537 Include nextInclude, 1538 FhirContext fhirContext, 1539 String findPidFieldName, 1540 String findVersionFieldName, 1541 String searchPidFieldName, 1542 boolean reverseMode, 1543 List<JpaPid> nextRoundMatches, 1544 EntityManager entityManager, 1545 Integer maxCount, 1546 HashSet<JpaPid> pidsToInclude) { 1547 List<String> paths; 1548 1549 // Start replace 1550 RuntimeSearchParam param; 1551 String resType = nextInclude.getParamType(); 1552 if (isBlank(resType)) { 1553 return; 1554 } 1555 RuntimeResourceDefinition def = fhirContext.getResourceDefinition(resType); 1556 if (def == null) { 1557 ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue()); 1558 return; 1559 } 1560 1561 String paramName = nextInclude.getParamName(); 1562 if (isNotBlank(paramName)) { 1563 param = mySearchParamRegistry.getActiveSearchParam( 1564 resType, paramName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1565 } else { 1566 param = null; 1567 } 1568 if (param == null) { 1569 ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue()); 1570 return; 1571 } 1572 1573 paths = param.getPathsSplitForResourceType(resType); 1574 // end replace 1575 1576 Set<String> targetResourceTypes = computeTargetResourceTypes(nextInclude, param); 1577 1578 for (String nextPath : paths) { 1579 String findPidFieldSqlColumn = 1580 findPidFieldName.equals(MY_SOURCE_RESOURCE_PID) ? "src_resource_id" : "target_resource_id"; 1581 String fieldsToLoad = "r." + findPidFieldSqlColumn + " AS " + RESOURCE_ID_ALIAS; 1582 if (findVersionFieldName != null) { 1583 fieldsToLoad += ", r.target_resource_version AS " + RESOURCE_VERSION_ALIAS; 1584 } 1585 1586 // Query for includes lookup has 2 cases 1587 // Case 1: Where target_resource_id is available in hfj_res_link table for local references 1588 // Case 2: Where target_resource_id is null in hfj_res_link table and referred by a canonical 1589 // url in target_resource_url 1590 1591 // Case 1: 1592 Map<String, Object> localReferenceQueryParams = new HashMap<>(); 1593 1594 String searchPidFieldSqlColumn = 1595 searchPidFieldName.equals(MY_TARGET_RESOURCE_PID) ? "target_resource_id" : "src_resource_id"; 1596 StringBuilder localReferenceQuery = new StringBuilder("SELECT " + fieldsToLoad + " FROM hfj_res_link r " 1597 + " WHERE r.src_path = :src_path AND " 1598 + " r.target_resource_id IS NOT NULL AND " 1599 + " r." 1600 + searchPidFieldSqlColumn + " IN (:target_pids) "); 1601 localReferenceQueryParams.put("src_path", nextPath); 1602 // we loop over target_pids later. 1603 if (targetResourceTypes != null) { 1604 if (targetResourceTypes.size() == 1) { 1605 localReferenceQuery.append(" AND r.target_resource_type = :target_resource_type "); 1606 localReferenceQueryParams.put( 1607 "target_resource_type", 1608 targetResourceTypes.iterator().next()); 1609 } else { 1610 localReferenceQuery.append(" AND r.target_resource_type in (:target_resource_types) "); 1611 localReferenceQueryParams.put("target_resource_types", targetResourceTypes); 1612 } 1613 } 1614 1615 // Case 2: 1616 Pair<String, Map<String, Object>> canonicalQuery = 1617 buildCanonicalUrlQuery(findVersionFieldName, targetResourceTypes, reverseMode); 1618 1619 String sql = localReferenceQuery + " UNION " + canonicalQuery.getLeft(); 1620 1621 Map<String, Object> limitParams = new HashMap<>(); 1622 if (maxCount != null) { 1623 LinkedList<Object> bindVariables = new LinkedList<>(); 1624 sql = SearchQueryBuilder.applyLimitToSql( 1625 myDialectProvider.getDialect(), null, maxCount, sql, null, bindVariables); 1626 1627 // The dialect SQL limiter uses positional params, but we're using 1628 // named params here, so we need to replace the positional params 1629 // with equivalent named ones 1630 StringBuilder sb = new StringBuilder(); 1631 for (int i = 0; i < sql.length(); i++) { 1632 char nextChar = sql.charAt(i); 1633 if (nextChar == '?') { 1634 String nextName = "limit" + i; 1635 sb.append(':').append(nextName); 1636 limitParams.put(nextName, bindVariables.removeFirst()); 1637 } else { 1638 sb.append(nextChar); 1639 } 1640 } 1641 sql = sb.toString(); 1642 } 1643 1644 List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize()); 1645 for (Collection<JpaPid> nextPartition : partitions) { 1646 Query q = entityManager.createNativeQuery(sql, Tuple.class); 1647 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1648 localReferenceQueryParams.forEach(q::setParameter); 1649 canonicalQuery.getRight().forEach(q::setParameter); 1650 limitParams.forEach(q::setParameter); 1651 1652 @SuppressWarnings("unchecked") 1653 List<Tuple> results = q.getResultList(); 1654 for (Tuple result : results) { 1655 if (result != null) { 1656 Long resourceId = NumberUtils.createLong(String.valueOf(result.get(RESOURCE_ID_ALIAS))); 1657 Long resourceVersion = null; 1658 if (findVersionFieldName != null && result.get(RESOURCE_VERSION_ALIAS) != null) { 1659 resourceVersion = 1660 NumberUtils.createLong(String.valueOf(result.get(RESOURCE_VERSION_ALIAS))); 1661 } 1662 pidsToInclude.add(JpaPid.fromIdAndVersion(resourceId, resourceVersion)); 1663 } 1664 } 1665 } 1666 } 1667 } 1668 1669 private void loadIncludesMatchAll( 1670 String findPidFieldName, 1671 String findResourceTypeFieldName, 1672 String findVersionFieldName, 1673 String searchPidFieldName, 1674 String wantResourceType, 1675 boolean reverseMode, 1676 boolean hasDesiredResourceTypes, 1677 List<JpaPid> nextRoundMatches, 1678 EntityManager entityManager, 1679 Integer maxCount, 1680 List<String> desiredResourceTypes, 1681 HashSet<JpaPid> pidsToInclude, 1682 RequestDetails request) { 1683 StringBuilder sqlBuilder = new StringBuilder(); 1684 sqlBuilder.append("SELECT r.").append(findPidFieldName); 1685 sqlBuilder.append(", r.").append(findResourceTypeFieldName); 1686 sqlBuilder.append(", r.myTargetResourceUrl"); 1687 if (findVersionFieldName != null) { 1688 sqlBuilder.append(", r.").append(findVersionFieldName); 1689 } 1690 sqlBuilder.append(" FROM ResourceLink r WHERE "); 1691 1692 sqlBuilder.append("r."); 1693 sqlBuilder.append(searchPidFieldName); // (rev mode) target_resource_id | source_resource_id 1694 sqlBuilder.append(" IN (:target_pids)"); 1695 1696 /* 1697 * We need to set the resource type in 2 cases only: 1698 * 1) we are in $everything mode 1699 * (where we only want to fetch specific resource types, regardless of what is 1700 * available to fetch) 1701 * 2) we are doing revincludes 1702 * 1703 * Technically if the request is a qualified star (e.g. _include=Observation:*) we 1704 * should always be checking the source resource type on the resource link. We don't 1705 * actually index that column though by default, so in order to try and be efficient 1706 * we don't actually include it for includes (but we do for revincludes). This is 1707 * because for an include, it doesn't really make sense to include a different 1708 * resource type than the one you are searching on. 1709 */ 1710 if (wantResourceType != null && (reverseMode || (myParams != null && myParams.getEverythingMode() != null))) { 1711 // because mySourceResourceType is not part of the HFJ_RES_LINK 1712 // index, this might not be the most optimal performance. 1713 // but it is for an $everything operation (and maybe we should update the index) 1714 sqlBuilder.append(" AND r.mySourceResourceType = :want_resource_type"); 1715 } else { 1716 wantResourceType = null; 1717 } 1718 1719 // When calling $everything on a Patient instance, we don't want to recurse into new Patient 1720 // resources 1721 // (e.g. via Provenance, List, or Group) when in an $everything operation 1722 if (myParams != null 1723 && myParams.getEverythingMode() == SearchParameterMap.EverythingModeEnum.PATIENT_INSTANCE) { 1724 sqlBuilder.append(" AND r.myTargetResourceType != 'Patient'"); 1725 sqlBuilder.append(UNDESIRED_RESOURCE_LINKAGES_FOR_EVERYTHING_ON_PATIENT_INSTANCE.stream() 1726 .collect(Collectors.joining("', '", " AND r.mySourceResourceType NOT IN ('", "')"))); 1727 } 1728 if (hasDesiredResourceTypes) { 1729 sqlBuilder.append(" AND r.myTargetResourceType IN (:desired_target_resource_types)"); 1730 } 1731 1732 String sql = sqlBuilder.toString(); 1733 List<Collection<JpaPid>> partitions = partition(nextRoundMatches, getMaximumPageSize()); 1734 for (Collection<JpaPid> nextPartition : partitions) { 1735 TypedQuery<?> q = entityManager.createQuery(sql, Object[].class); 1736 q.setParameter("target_pids", JpaPid.toLongList(nextPartition)); 1737 if (wantResourceType != null) { 1738 q.setParameter("want_resource_type", wantResourceType); 1739 } 1740 if (maxCount != null) { 1741 q.setMaxResults(maxCount); 1742 } 1743 if (hasDesiredResourceTypes) { 1744 q.setParameter("desired_target_resource_types", desiredResourceTypes); 1745 } 1746 List<?> results = q.getResultList(); 1747 Set<String> canonicalUrls = null; 1748 for (Object nextRow : results) { 1749 if (nextRow == null) { 1750 // This can happen if there are outgoing references which are canonical or point to 1751 // other servers 1752 continue; 1753 } 1754 1755 Long version = null; 1756 Long resourceId = (Long) ((Object[]) nextRow)[0]; 1757 String resourceType = (String) ((Object[]) nextRow)[1]; 1758 String resourceCanonicalUrl = (String) ((Object[]) nextRow)[2]; 1759 if (findVersionFieldName != null) { 1760 version = (Long) ((Object[]) nextRow)[3]; 1761 } 1762 1763 if (resourceId != null) { 1764 JpaPid pid = JpaPid.fromIdAndVersionAndResourceType(resourceId, version, resourceType); 1765 pidsToInclude.add(pid); 1766 } else if (resourceCanonicalUrl != null) { 1767 if (canonicalUrls == null) { 1768 canonicalUrls = new HashSet<>(); 1769 } 1770 canonicalUrls.add(resourceCanonicalUrl); 1771 } 1772 } 1773 1774 if (canonicalUrls != null) { 1775 String message = 1776 "Search with _include=* can be inefficient when references using canonical URLs are detected. Use more specific _include values instead."; 1777 firePerformanceWarning(request, message); 1778 loadCanonicalUrls(canonicalUrls, entityManager, pidsToInclude, reverseMode); 1779 } 1780 } 1781 } 1782 1783 private void loadCanonicalUrls( 1784 Set<String> theCanonicalUrls, 1785 EntityManager theEntityManager, 1786 HashSet<JpaPid> thePidsToInclude, 1787 boolean theReverse) { 1788 StringBuilder sqlBuilder; 1789 Set<Long> identityHashesForTypes = calculateIndexUriIdentityHashesForResourceTypes(null, theReverse); 1790 List<Collection<String>> canonicalUrlPartitions = 1791 partition(theCanonicalUrls, getMaximumPageSize() - identityHashesForTypes.size()); 1792 1793 sqlBuilder = new StringBuilder(); 1794 sqlBuilder.append("SELECT i.myResourcePid "); 1795 sqlBuilder.append("FROM ResourceIndexedSearchParamUri i "); 1796 sqlBuilder.append("WHERE i.myHashIdentity IN (:hash_identity) "); 1797 sqlBuilder.append("AND i.myUri IN (:uris)"); 1798 1799 String canonicalResSql = sqlBuilder.toString(); 1800 1801 for (Collection<String> nextCanonicalUrlList : canonicalUrlPartitions) { 1802 TypedQuery<Long> canonicalResIdQuery = theEntityManager.createQuery(canonicalResSql, Long.class); 1803 canonicalResIdQuery.setParameter("hash_identity", identityHashesForTypes); 1804 canonicalResIdQuery.setParameter("uris", nextCanonicalUrlList); 1805 List<Long> resIds = canonicalResIdQuery.getResultList(); 1806 for (var next : resIds) { 1807 if (next != null) { 1808 thePidsToInclude.add(JpaPid.fromId(next)); 1809 } 1810 } 1811 } 1812 } 1813 1814 /** 1815 * Calls Performance Trace Hook 1816 * @param request the request deatils 1817 * Sends a raw SQL query to the Pointcut for raw SQL queries. 1818 */ 1819 private void callRawSqlHookWithCurrentThreadQueries(RequestDetails request) { 1820 SqlQueryList capturedQueries = CurrentThreadCaptureQueriesListener.getCurrentQueueAndStopCapturing(); 1821 HookParams params = new HookParams() 1822 .add(RequestDetails.class, request) 1823 .addIfMatchesType(ServletRequestDetails.class, request) 1824 .add(SqlQueryList.class, capturedQueries); 1825 CompositeInterceptorBroadcaster.doCallHooks( 1826 myInterceptorBroadcaster, request, Pointcut.JPA_PERFTRACE_RAW_SQL, params); 1827 } 1828 1829 @Nullable 1830 private static Set<String> computeTargetResourceTypes(Include nextInclude, RuntimeSearchParam param) { 1831 String targetResourceType = defaultString(nextInclude.getParamTargetType(), null); 1832 boolean haveTargetTypesDefinedByParam = param.hasTargets(); 1833 Set<String> targetResourceTypes; 1834 if (targetResourceType != null) { 1835 targetResourceTypes = Set.of(targetResourceType); 1836 } else if (haveTargetTypesDefinedByParam) { 1837 targetResourceTypes = param.getTargets(); 1838 } else { 1839 // all types! 1840 targetResourceTypes = null; 1841 } 1842 return targetResourceTypes; 1843 } 1844 1845 @Nonnull 1846 private Pair<String, Map<String, Object>> buildCanonicalUrlQuery( 1847 String theVersionFieldName, Set<String> theTargetResourceTypes, boolean theReverse) { 1848 String fieldsToLoadFromSpidxUriTable = theReverse ? "r.src_resource_id" : "rUri.res_id"; 1849 if (theVersionFieldName != null) { 1850 // canonical-uri references aren't versioned, but we need to match the column count for the UNION 1851 fieldsToLoadFromSpidxUriTable += ", NULL"; 1852 } 1853 // The logical join will be by hfj_spidx_uri on sp_name='uri' and sp_uri=target_resource_url. 1854 // But sp_name isn't indexed, so we use hash_identity instead. 1855 Set<Long> identityHashesForTypes = 1856 calculateIndexUriIdentityHashesForResourceTypes(theTargetResourceTypes, theReverse); 1857 1858 Map<String, Object> canonicalUriQueryParams = new HashMap<>(); 1859 StringBuilder canonicalUrlQuery = new StringBuilder( 1860 "SELECT " + fieldsToLoadFromSpidxUriTable + " FROM hfj_res_link r " + " JOIN hfj_spidx_uri rUri ON ( "); 1861 // join on hash_identity and sp_uri - indexed in IDX_SP_URI_HASH_IDENTITY_V2 1862 if (theTargetResourceTypes != null && theTargetResourceTypes.size() == 1) { 1863 canonicalUrlQuery.append(" rUri.hash_identity = :uri_identity_hash "); 1864 canonicalUriQueryParams.put( 1865 "uri_identity_hash", identityHashesForTypes.iterator().next()); 1866 } else { 1867 canonicalUrlQuery.append(" rUri.hash_identity in (:uri_identity_hashes) "); 1868 canonicalUriQueryParams.put("uri_identity_hashes", identityHashesForTypes); 1869 } 1870 1871 canonicalUrlQuery.append(" AND r.target_resource_url = rUri.sp_uri )"); 1872 canonicalUrlQuery.append(" WHERE r.src_path = :src_path AND "); 1873 canonicalUrlQuery.append(" r.target_resource_id IS NULL "); 1874 canonicalUrlQuery.append(" AND "); 1875 if (theReverse) { 1876 canonicalUrlQuery.append("rUri.res_id"); 1877 } else { 1878 canonicalUrlQuery.append("r.src_resource_id"); 1879 } 1880 canonicalUrlQuery.append(" IN (:target_pids) "); 1881 1882 return Pair.of(canonicalUrlQuery.toString(), canonicalUriQueryParams); 1883 } 1884 1885 @Nonnull 1886 Set<Long> calculateIndexUriIdentityHashesForResourceTypes(Set<String> theTargetResourceTypes, boolean theReverse) { 1887 Set<String> targetResourceTypes = theTargetResourceTypes; 1888 if (targetResourceTypes == null) { 1889 /* 1890 * If we don't have a list of valid target types, we need to figure out a list of all 1891 * possible target types in order to perform the search of the URI index table. This is 1892 * because the hash_identity column encodes the resource type, so we'll need a hash 1893 * value for each possible target type. 1894 */ 1895 targetResourceTypes = new HashSet<>(); 1896 Set<String> possibleTypes = myDaoRegistry.getRegisteredDaoTypes(); 1897 if (theReverse) { 1898 // For reverse includes, it is really hard to figure out what types 1899 // are actually potentially pointing to the type we're searching for 1900 // in this context, so let's just assume it could be anything. 1901 targetResourceTypes = possibleTypes; 1902 } else { 1903 for (var next : mySearchParamRegistry 1904 .getActiveSearchParams(myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH) 1905 .values() 1906 .stream() 1907 .filter(t -> t.getParamType().equals(RestSearchParameterTypeEnum.REFERENCE)) 1908 .collect(Collectors.toList())) { 1909 1910 // If the reference points to a Reference (ie not a canonical or CanonicalReference) 1911 // then it doesn't matter here anyhow. The logic here only works for elements at the 1912 // root level of the document (e.g. QuestionnaireResponse.subject or 1913 // QuestionnaireResponse.subject.where(...)) but this is just an optimization 1914 // anyhow. 1915 if (next.getPath().startsWith(myResourceName + ".")) { 1916 String elementName = 1917 next.getPath().substring(next.getPath().indexOf('.') + 1); 1918 int secondDotIndex = elementName.indexOf('.'); 1919 if (secondDotIndex != -1) { 1920 elementName = elementName.substring(0, secondDotIndex); 1921 } 1922 BaseRuntimeChildDefinition child = 1923 myContext.getResourceDefinition(myResourceName).getChildByName(elementName); 1924 if (child != null) { 1925 BaseRuntimeElementDefinition<?> childDef = child.getChildByName(elementName); 1926 if (childDef != null) { 1927 if (childDef.getName().equals("Reference")) { 1928 continue; 1929 } 1930 } 1931 } 1932 } 1933 1934 if (!next.getTargets().isEmpty()) { 1935 // For each reference parameter on the resource type we're searching for, 1936 // add all the potential target types to the list of possible target 1937 // resource types we can look up. 1938 for (var nextTarget : next.getTargets()) { 1939 if (possibleTypes.contains(nextTarget)) { 1940 targetResourceTypes.add(nextTarget); 1941 } 1942 } 1943 } else { 1944 // If we have any references that don't define any target types, then 1945 // we need to assume that all enabled resource types are possible target 1946 // types 1947 targetResourceTypes.addAll(possibleTypes); 1948 break; 1949 } 1950 } 1951 } 1952 } 1953 assert !targetResourceTypes.isEmpty(); 1954 1955 return targetResourceTypes.stream() 1956 .map(type -> BaseResourceIndexedSearchParam.calculateHashIdentity( 1957 myPartitionSettings, myRequestPartitionId, type, "url")) 1958 .collect(Collectors.toSet()); 1959 } 1960 1961 private <T> List<Collection<T>> partition(Collection<T> theNextRoundMatches, int theMaxLoad) { 1962 if (theNextRoundMatches.size() <= theMaxLoad) { 1963 return Collections.singletonList(theNextRoundMatches); 1964 } else { 1965 1966 List<Collection<T>> retVal = new ArrayList<>(); 1967 Collection<T> current = null; 1968 for (T next : theNextRoundMatches) { 1969 if (current == null) { 1970 current = new ArrayList<>(theMaxLoad); 1971 retVal.add(current); 1972 } 1973 1974 current.add(next); 1975 1976 if (current.size() >= theMaxLoad) { 1977 current = null; 1978 } 1979 } 1980 1981 return retVal; 1982 } 1983 } 1984 1985 private void attemptComboUniqueSpProcessing( 1986 QueryStack theQueryStack, @Nonnull SearchParameterMap theParams, RequestDetails theRequest) { 1987 RuntimeSearchParam comboParam = null; 1988 List<String> comboParamNames = null; 1989 List<RuntimeSearchParam> exactMatchParams = mySearchParamRegistry.getActiveComboSearchParams( 1990 myResourceName, theParams.keySet(), ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1991 if (!exactMatchParams.isEmpty()) { 1992 comboParam = exactMatchParams.get(0); 1993 comboParamNames = new ArrayList<>(theParams.keySet()); 1994 } 1995 1996 if (comboParam == null) { 1997 List<RuntimeSearchParam> candidateComboParams = mySearchParamRegistry.getActiveComboSearchParams( 1998 myResourceName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 1999 for (RuntimeSearchParam nextCandidate : candidateComboParams) { 2000 List<String> nextCandidateParamNames = 2001 JpaParamUtil.resolveComponentParameters(mySearchParamRegistry, nextCandidate).stream() 2002 .map(RuntimeSearchParam::getName) 2003 .collect(Collectors.toList()); 2004 if (theParams.keySet().containsAll(nextCandidateParamNames)) { 2005 comboParam = nextCandidate; 2006 comboParamNames = nextCandidateParamNames; 2007 break; 2008 } 2009 } 2010 } 2011 2012 if (comboParam != null) { 2013 Collections.sort(comboParamNames); 2014 2015 // Since we're going to remove elements below 2016 theParams.values().forEach(this::ensureSubListsAreWritable); 2017 2018 /* 2019 * Apply search against the combo param index in a loop: 2020 * 2021 * 1. First we check whether the actual parameter values in the 2022 * parameter map are actually usable for searching against the combo 2023 * param index. E.g. no search modifiers, date comparators, etc., 2024 * since these mean you can't use the combo index. 2025 * 2026 * 2. Apply and create the join SQl. We remove parameter values from 2027 * the map as we apply them, so any parameter values remaining in the 2028 * map after each loop haven't yet been factored into the SQL. 2029 * 2030 * The loop allows us to create multiple combo index joins if there 2031 * are multiple AND expressions for the related parameters. 2032 */ 2033 while (validateParamValuesAreValidForComboParam(theRequest, theParams, comboParamNames, comboParam)) { 2034 applyComboSearchParam(theQueryStack, theParams, theRequest, comboParamNames, comboParam); 2035 } 2036 } 2037 } 2038 2039 private void applyComboSearchParam( 2040 QueryStack theQueryStack, 2041 @Nonnull SearchParameterMap theParams, 2042 RequestDetails theRequest, 2043 List<String> theComboParamNames, 2044 RuntimeSearchParam theComboParam) { 2045 2046 List<List<IQueryParameterType>> inputs = new ArrayList<>(); 2047 for (String nextParamName : theComboParamNames) { 2048 List<IQueryParameterType> nextValues = theParams.get(nextParamName).remove(0); 2049 inputs.add(nextValues); 2050 } 2051 2052 List<List<IQueryParameterType>> inputPermutations = Lists.cartesianProduct(inputs); 2053 List<String> indexStrings = new ArrayList<>(CartesianProductUtil.calculateCartesianProductSize(inputs)); 2054 for (List<IQueryParameterType> nextPermutation : inputPermutations) { 2055 2056 StringBuilder searchStringBuilder = new StringBuilder(); 2057 searchStringBuilder.append(myResourceName); 2058 searchStringBuilder.append("?"); 2059 2060 boolean first = true; 2061 for (int paramIndex = 0; paramIndex < theComboParamNames.size(); paramIndex++) { 2062 2063 String nextParamName = theComboParamNames.get(paramIndex); 2064 IQueryParameterType nextOr = nextPermutation.get(paramIndex); 2065 String nextOrValue = nextOr.getValueAsQueryToken(myContext); 2066 2067 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2068 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2069 if (theComboParam.getComboSearchParamType() == ComboSearchParamType.NON_UNIQUE) { 2070 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.STRING) { 2071 nextOrValue = StringUtil.normalizeStringForSearchIndexing(nextOrValue); 2072 } 2073 } 2074 2075 if (first) { 2076 first = false; 2077 } else { 2078 searchStringBuilder.append('&'); 2079 } 2080 2081 nextParamName = UrlUtil.escapeUrlParam(nextParamName); 2082 nextOrValue = UrlUtil.escapeUrlParam(nextOrValue); 2083 2084 searchStringBuilder.append(nextParamName).append('=').append(nextOrValue); 2085 } 2086 2087 String indexString = searchStringBuilder.toString(); 2088 ourLog.debug( 2089 "Checking for {} combo index for query: {}", theComboParam.getComboSearchParamType(), indexString); 2090 2091 indexStrings.add(indexString); 2092 } 2093 2094 // Just to make sure we're stable for tests 2095 indexStrings.sort(Comparator.naturalOrder()); 2096 2097 // Interceptor broadcast: JPA_PERFTRACE_INFO 2098 String indexStringForLog = indexStrings.size() > 1 ? indexStrings.toString() : indexStrings.get(0); 2099 StorageProcessingMessage msg = new StorageProcessingMessage() 2100 .setMessage("Using " + theComboParam.getComboSearchParamType() + " index(es) for query for search: " 2101 + indexStringForLog); 2102 HookParams params = new HookParams() 2103 .add(RequestDetails.class, theRequest) 2104 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2105 .add(StorageProcessingMessage.class, msg); 2106 CompositeInterceptorBroadcaster.doCallHooks( 2107 myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INFO, params); 2108 2109 switch (requireNonNull(theComboParam.getComboSearchParamType())) { 2110 case UNIQUE: 2111 theQueryStack.addPredicateCompositeUnique(indexStrings, myRequestPartitionId); 2112 break; 2113 case NON_UNIQUE: 2114 theQueryStack.addPredicateCompositeNonUnique(indexStrings, myRequestPartitionId); 2115 break; 2116 } 2117 2118 // Remove any empty parameters remaining after this 2119 theParams.clean(); 2120 } 2121 2122 /** 2123 * Returns {@literal true} if the actual parameter instances in a given query are actually usable for 2124 * searching against a combo param with the given parameter names. This might be {@literal false} if 2125 * parameters have modifiers (e.g. <code>?name:exact=SIMPSON</code>), prefixes 2126 * (e.g. <code>?date=gt2024-02-01</code>), etc. 2127 */ 2128 private boolean validateParamValuesAreValidForComboParam( 2129 RequestDetails theRequest, 2130 @Nonnull SearchParameterMap theParams, 2131 List<String> theComboParamNames, 2132 RuntimeSearchParam theComboParam) { 2133 boolean paramValuesAreValidForCombo = true; 2134 List<List<IQueryParameterType>> paramOrValues = new ArrayList<>(theComboParamNames.size()); 2135 2136 for (String nextParamName : theComboParamNames) { 2137 List<List<IQueryParameterType>> nextValues = theParams.get(nextParamName); 2138 2139 if (nextValues == null || nextValues.isEmpty()) { 2140 paramValuesAreValidForCombo = false; 2141 break; 2142 } 2143 2144 List<IQueryParameterType> nextAndValue = nextValues.get(0); 2145 paramOrValues.add(nextAndValue); 2146 2147 for (IQueryParameterType nextOrValue : nextAndValue) { 2148 if (nextOrValue instanceof DateParam) { 2149 DateParam dateParam = (DateParam) nextOrValue; 2150 if (dateParam.getPrecision() != TemporalPrecisionEnum.DAY) { 2151 String message = "Search with params " + theComboParamNames 2152 + " is not a candidate for combo searching - Date search with non-DAY precision for parameter '" 2153 + nextParamName + "'"; 2154 firePerformanceInfo(theRequest, message); 2155 paramValuesAreValidForCombo = false; 2156 break; 2157 } 2158 } 2159 if (nextOrValue instanceof BaseParamWithPrefix) { 2160 BaseParamWithPrefix<?> paramWithPrefix = (BaseParamWithPrefix<?>) nextOrValue; 2161 if (paramWithPrefix.getPrefix() != null) { 2162 String message = "Search with params " + theComboParamNames 2163 + " is not a candidate for combo searching - Parameter '" + nextParamName 2164 + "' has prefix: '" 2165 + paramWithPrefix.getPrefix().getValue() + "'"; 2166 firePerformanceInfo(theRequest, message); 2167 paramValuesAreValidForCombo = false; 2168 break; 2169 } 2170 } 2171 if (isNotBlank(nextOrValue.getQueryParameterQualifier())) { 2172 String message = "Search with params " + theComboParamNames 2173 + " is not a candidate for combo searching - Parameter '" + nextParamName 2174 + "' has modifier: '" + nextOrValue.getQueryParameterQualifier() + "'"; 2175 firePerformanceInfo(theRequest, message); 2176 paramValuesAreValidForCombo = false; 2177 break; 2178 } 2179 } 2180 2181 // Reference params are only eligible for using a composite index if they 2182 // are qualified 2183 RuntimeSearchParam nextParamDef = mySearchParamRegistry.getActiveSearchParam( 2184 myResourceName, nextParamName, ISearchParamRegistry.SearchParamLookupContextEnum.SEARCH); 2185 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.REFERENCE) { 2186 ReferenceParam param = (ReferenceParam) nextValues.get(0).get(0); 2187 if (isBlank(param.getResourceType())) { 2188 ourLog.debug( 2189 "Search is not a candidate for unique combo searching - Reference with no type specified"); 2190 paramValuesAreValidForCombo = false; 2191 break; 2192 } 2193 } 2194 2195 // Date params are not eligible for using composite unique index 2196 // as index could contain date with different precision (e.g. DAY, SECOND) 2197 if (nextParamDef.getParamType() == RestSearchParameterTypeEnum.DATE 2198 && theComboParam.getComboSearchParamType() == ComboSearchParamType.UNIQUE) { 2199 ourLog.debug( 2200 "Search with params {} is not a candidate for combo searching - " 2201 + "Unique combo search parameter '{}' has DATE type", 2202 theComboParamNames, 2203 nextParamName); 2204 paramValuesAreValidForCombo = false; 2205 break; 2206 } 2207 } 2208 2209 if (CartesianProductUtil.calculateCartesianProductSize(paramOrValues) > 500) { 2210 ourLog.debug( 2211 "Search is not a candidate for unique combo searching - Too many OR values would result in too many permutations"); 2212 paramValuesAreValidForCombo = false; 2213 } 2214 2215 return paramValuesAreValidForCombo; 2216 } 2217 2218 private <T> void ensureSubListsAreWritable(List<List<T>> theListOfLists) { 2219 for (int i = 0; i < theListOfLists.size(); i++) { 2220 List<T> oldSubList = theListOfLists.get(i); 2221 if (!(oldSubList instanceof ArrayList)) { 2222 List<T> newSubList = new ArrayList<>(oldSubList); 2223 theListOfLists.set(i, newSubList); 2224 } 2225 } 2226 } 2227 2228 @Override 2229 public void setFetchSize(int theFetchSize) { 2230 myFetchSize = theFetchSize; 2231 } 2232 2233 public SearchParameterMap getParams() { 2234 return myParams; 2235 } 2236 2237 public CriteriaBuilder getBuilder() { 2238 return myCriteriaBuilder; 2239 } 2240 2241 public Class<? extends IBaseResource> getResourceType() { 2242 return myResourceType; 2243 } 2244 2245 public String getResourceName() { 2246 return myResourceName; 2247 } 2248 2249 /** 2250 * IncludesIterator, used to recursively fetch resources from the provided list of PIDs 2251 */ 2252 public class IncludesIterator extends BaseIterator<JpaPid> implements Iterator<JpaPid> { 2253 2254 private final RequestDetails myRequest; 2255 private final Set<JpaPid> myCurrentPids; 2256 private Iterator<JpaPid> myCurrentIterator; 2257 private JpaPid myNext; 2258 2259 IncludesIterator(Set<JpaPid> thePidSet, RequestDetails theRequest) { 2260 myCurrentPids = new HashSet<>(thePidSet); 2261 myCurrentIterator = null; 2262 myRequest = theRequest; 2263 } 2264 2265 private void fetchNext() { 2266 while (myNext == null) { 2267 2268 if (myCurrentIterator == null) { 2269 Set<Include> includes = new HashSet<>(); 2270 if (myParams.containsKey(Constants.PARAM_TYPE)) { 2271 for (List<IQueryParameterType> typeList : myParams.get(Constants.PARAM_TYPE)) { 2272 for (IQueryParameterType type : typeList) { 2273 String queryString = ParameterUtil.unescape(type.getValueAsQueryToken(myContext)); 2274 for (String resourceType : queryString.split(",")) { 2275 String rt = resourceType.trim(); 2276 if (isNotBlank(rt)) { 2277 includes.add(new Include(rt + ":*", true)); 2278 } 2279 } 2280 } 2281 } 2282 } 2283 if (includes.isEmpty()) { 2284 includes.add(new Include("*", true)); 2285 } 2286 Set<JpaPid> newPids = loadIncludes( 2287 myContext, 2288 myEntityManager, 2289 myCurrentPids, 2290 includes, 2291 false, 2292 getParams().getLastUpdated(), 2293 mySearchUuid, 2294 myRequest, 2295 null); 2296 myCurrentIterator = newPids.iterator(); 2297 } 2298 2299 if (myCurrentIterator.hasNext()) { 2300 myNext = myCurrentIterator.next(); 2301 } else { 2302 myNext = NO_MORE; 2303 } 2304 } 2305 } 2306 2307 @Override 2308 public boolean hasNext() { 2309 fetchNext(); 2310 return !NO_MORE.equals(myNext); 2311 } 2312 2313 @Override 2314 public JpaPid next() { 2315 fetchNext(); 2316 JpaPid retVal = myNext; 2317 myNext = null; 2318 return retVal; 2319 } 2320 } 2321 2322 /** 2323 * Basic Query iterator, used to fetch the results of a query. 2324 */ 2325 private final class QueryIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> { 2326 2327 private final SearchRuntimeDetails mySearchRuntimeDetails; 2328 private final RequestDetails myRequest; 2329 private final boolean myHaveRawSqlHooks; 2330 private final boolean myHavePerfTraceFoundIdHook; 2331 private final SortSpec mySort; 2332 private final Integer myOffset; 2333 private boolean myFirst = true; 2334 private IncludesIterator myIncludesIterator; 2335 /** 2336 * The next JpaPid value of the next result in this query. 2337 * Will not be null if fetched using getNext() 2338 */ 2339 private JpaPid myNext; 2340 /** 2341 * The current query result iterator running sql and supplying PIDs 2342 * @see #myQueryList 2343 */ 2344 private ISearchQueryExecutor myResultsIterator; 2345 2346 private boolean myFetchIncludesForEverythingOperation; 2347 /** 2348 * The count of resources skipped because they were seen in earlier results 2349 */ 2350 private int mySkipCount = 0; 2351 /** 2352 * The count of resources that are new in this search 2353 * (ie, not cached in previous searches) 2354 */ 2355 private int myNonSkipCount = 0; 2356 2357 /** 2358 * The list of queries to use to find all results. 2359 * Normal JPA queries will normally have a single entry. 2360 * Queries that involve Hibernate Search/Elastisearch may have 2361 * multiple queries because of chunking. 2362 * The $everything operation also jams some extra results in. 2363 */ 2364 private List<ISearchQueryExecutor> myQueryList = new ArrayList<>(); 2365 2366 private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) { 2367 mySearchRuntimeDetails = theSearchRuntimeDetails; 2368 mySort = myParams.getSort(); 2369 myOffset = myParams.getOffset(); 2370 myRequest = theRequest; 2371 2372 // everything requires fetching recursively all related resources 2373 if (myParams.getEverythingMode() != null) { 2374 myFetchIncludesForEverythingOperation = true; 2375 } 2376 2377 myHavePerfTraceFoundIdHook = CompositeInterceptorBroadcaster.hasHooks( 2378 Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, myInterceptorBroadcaster, myRequest); 2379 myHaveRawSqlHooks = CompositeInterceptorBroadcaster.hasHooks( 2380 Pointcut.JPA_PERFTRACE_RAW_SQL, myInterceptorBroadcaster, myRequest); 2381 } 2382 2383 private void fetchNext() { 2384 try { 2385 if (myHaveRawSqlHooks) { 2386 CurrentThreadCaptureQueriesListener.startCapturing(); 2387 } 2388 2389 // If we don't have a query yet, create one 2390 if (myResultsIterator == null) { 2391 if (myMaxResultsToFetch == null) { 2392 myMaxResultsToFetch = calculateMaxResultsToFetch(); 2393 } 2394 2395 /* 2396 * assigns the results iterator 2397 * and populates the myQueryList. 2398 */ 2399 initializeIteratorQuery(myOffset, myMaxResultsToFetch); 2400 } 2401 2402 if (myNext == null) { 2403 // no next means we need a new query (if one is available) 2404 while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) { 2405 /* 2406 * Because we combine our DB searches with Lucene 2407 * sometimes we can have multiple results iterators 2408 * (with only some having data in them to extract). 2409 * 2410 * We'll iterate our results iterators until we 2411 * either run out of results iterators, or we 2412 * have one that actually has data in it. 2413 */ 2414 while (!myResultsIterator.hasNext() && !myQueryList.isEmpty()) { 2415 retrieveNextIteratorQuery(); 2416 } 2417 2418 if (!myResultsIterator.hasNext()) { 2419 // we couldn't find a results iterator; 2420 // we're done here 2421 break; 2422 } 2423 2424 Long nextLong = myResultsIterator.next(); 2425 if (myHavePerfTraceFoundIdHook) { 2426 callPerformanceTracingHook(nextLong); 2427 } 2428 2429 if (nextLong != null) { 2430 JpaPid next = JpaPid.fromId(nextLong); 2431 if (myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2432 myNext = next; 2433 myNonSkipCount++; 2434 break; 2435 } else { 2436 mySkipCount++; 2437 } 2438 } 2439 2440 if (!myResultsIterator.hasNext()) { 2441 if (myMaxResultsToFetch != null && (mySkipCount + myNonSkipCount == myMaxResultsToFetch)) { 2442 if (mySkipCount > 0 && myNonSkipCount == 0) { 2443 2444 sendProcessingMsgAndFirePerformanceHook(); 2445 2446 myMaxResultsToFetch += 1000; 2447 initializeIteratorQuery(myOffset, myMaxResultsToFetch); 2448 } 2449 } 2450 } 2451 } 2452 } 2453 2454 if (myNext == null) { 2455 // if we got here, it means the current JpaPid has already been processed, 2456 // and we will decide (here) if we need to fetch related resources recursively 2457 if (myFetchIncludesForEverythingOperation) { 2458 myIncludesIterator = new IncludesIterator(myPidSet, myRequest); 2459 myFetchIncludesForEverythingOperation = false; 2460 } 2461 if (myIncludesIterator != null) { 2462 while (myIncludesIterator.hasNext()) { 2463 JpaPid next = myIncludesIterator.next(); 2464 if (next != null && myPidSet.add(next) && doNotSkipNextPidForEverything()) { 2465 myNext = next; 2466 break; 2467 } 2468 } 2469 if (myNext == null) { 2470 myNext = NO_MORE; 2471 } 2472 } else { 2473 myNext = NO_MORE; 2474 } 2475 } 2476 2477 mySearchRuntimeDetails.setFoundMatchesCount(myPidSet.size()); 2478 2479 } finally { 2480 // search finished - fire hooks 2481 if (myHaveRawSqlHooks) { 2482 callRawSqlHookWithCurrentThreadQueries(myRequest); 2483 } 2484 } 2485 2486 if (myFirst) { 2487 HookParams params = new HookParams() 2488 .add(RequestDetails.class, myRequest) 2489 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2490 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2491 CompositeInterceptorBroadcaster.doCallHooks( 2492 myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FIRST_RESULT_LOADED, params); 2493 myFirst = false; 2494 } 2495 2496 if (NO_MORE.equals(myNext)) { 2497 HookParams params = new HookParams() 2498 .add(RequestDetails.class, myRequest) 2499 .addIfMatchesType(ServletRequestDetails.class, myRequest) 2500 .add(SearchRuntimeDetails.class, mySearchRuntimeDetails); 2501 CompositeInterceptorBroadcaster.doCallHooks( 2502 myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_SELECT_COMPLETE, params); 2503 } 2504 } 2505 2506 private Integer calculateMaxResultsToFetch() { 2507 if (myParams.getLoadSynchronousUpTo() != null) { 2508 return myParams.getLoadSynchronousUpTo(); 2509 } else if (myParams.getOffset() != null && myParams.getCount() != null) { 2510 return myParams.getEverythingMode() != null 2511 ? myParams.getOffset() + myParams.getCount() 2512 : myParams.getCount(); 2513 } else { 2514 return myStorageSettings.getFetchSizeDefaultMaximum(); 2515 } 2516 } 2517 2518 private boolean doNotSkipNextPidForEverything() { 2519 return !(myParams.getEverythingMode() != null && (myOffset != null && myOffset >= myPidSet.size())); 2520 } 2521 2522 private void callPerformanceTracingHook(Long theNextLong) { 2523 HookParams params = new HookParams() 2524 .add(Integer.class, System.identityHashCode(this)) 2525 .add(Object.class, theNextLong); 2526 CompositeInterceptorBroadcaster.doCallHooks( 2527 myInterceptorBroadcaster, myRequest, Pointcut.JPA_PERFTRACE_SEARCH_FOUND_ID, params); 2528 } 2529 2530 private void sendProcessingMsgAndFirePerformanceHook() { 2531 String msg = "Pass completed with no matching results seeking rows " 2532 + myPidSet.size() + "-" + mySkipCount 2533 + ". This indicates an inefficient query! Retrying with new max count of " 2534 + myMaxResultsToFetch; 2535 firePerformanceWarning(myRequest, msg); 2536 } 2537 2538 private void initializeIteratorQuery(Integer theOffset, Integer theMaxResultsToFetch) { 2539 Integer offset = theOffset; 2540 if (myQueryList.isEmpty()) { 2541 // Capture times for Lucene/Elasticsearch queries as well 2542 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2543 2544 // setting offset to 0 to fetch all resource ids to guarantee 2545 // correct output result for everything operation during paging 2546 if (myParams.getEverythingMode() != null) { 2547 offset = 0; 2548 } 2549 myQueryList = createQuery( 2550 myParams, mySort, offset, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails); 2551 } 2552 2553 mySearchRuntimeDetails.setQueryStopwatch(new StopWatch()); 2554 2555 retrieveNextIteratorQuery(); 2556 2557 mySkipCount = 0; 2558 myNonSkipCount = 0; 2559 } 2560 2561 private void retrieveNextIteratorQuery() { 2562 close(); 2563 if (isNotEmpty(myQueryList)) { 2564 myResultsIterator = myQueryList.remove(0); 2565 myHasNextIteratorQuery = true; 2566 } else { 2567 myResultsIterator = SearchQueryExecutor.emptyExecutor(); 2568 myHasNextIteratorQuery = false; 2569 } 2570 } 2571 2572 @Override 2573 public boolean hasNext() { 2574 if (myNext == null) { 2575 fetchNext(); 2576 } 2577 return !NO_MORE.equals(myNext); 2578 } 2579 2580 @Override 2581 public JpaPid next() { 2582 fetchNext(); 2583 JpaPid retVal = myNext; 2584 myNext = null; 2585 Validate.isTrue(!NO_MORE.equals(retVal), "No more elements"); 2586 return retVal; 2587 } 2588 2589 @Override 2590 public int getSkippedCount() { 2591 return mySkipCount; 2592 } 2593 2594 @Override 2595 public int getNonSkippedCount() { 2596 return myNonSkipCount; 2597 } 2598 2599 @Override 2600 public Collection<JpaPid> getNextResultBatch(long theBatchSize) { 2601 Collection<JpaPid> batch = new ArrayList<>(); 2602 while (this.hasNext() && batch.size() < theBatchSize) { 2603 batch.add(this.next()); 2604 } 2605 return batch; 2606 } 2607 2608 @Override 2609 public void close() { 2610 if (myResultsIterator != null) { 2611 myResultsIterator.close(); 2612 } 2613 myResultsIterator = null; 2614 } 2615 } 2616 2617 private void firePerformanceInfo(RequestDetails theRequest, String theMessage) { 2618 // Only log at debug level since these messages aren't considered important enough 2619 // that we should be cluttering the system log, but they are important to the 2620 // specific query being executed to we'll INFO level them there 2621 ourLog.debug(theMessage); 2622 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_INFO); 2623 } 2624 2625 private void firePerformanceWarning(RequestDetails theRequest, String theMessage) { 2626 ourLog.warn(theMessage); 2627 firePerformanceMessage(theRequest, theMessage, Pointcut.JPA_PERFTRACE_WARNING); 2628 } 2629 2630 private void firePerformanceMessage(RequestDetails theRequest, String theMessage, Pointcut pointcut) { 2631 StorageProcessingMessage message = new StorageProcessingMessage(); 2632 message.setMessage(theMessage); 2633 HookParams params = new HookParams() 2634 .add(RequestDetails.class, theRequest) 2635 .addIfMatchesType(ServletRequestDetails.class, theRequest) 2636 .add(StorageProcessingMessage.class, message); 2637 CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, pointcut, params); 2638 } 2639 2640 public static int getMaximumPageSize() { 2641 if (myMaxPageSizeForTests != null) { 2642 return myMaxPageSizeForTests; 2643 } 2644 return MAXIMUM_PAGE_SIZE; 2645 } 2646 2647 public static void setMaxPageSizeForTest(Integer theTestSize) { 2648 myMaxPageSizeForTests = theTestSize; 2649 } 2650}