
package ca.uhn.fhir.jpa.bulk.imprt.job;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2022 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.Map;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * Spring Batch {@link Partitioner} for bulk-import jobs: produces one
 * {@link ExecutionContext} per file in the job, so that each file can be
 * processed as its own partitioned step execution.
 */
public class BulkImportPartitioner implements Partitioner {
	public static final String FILE_INDEX = "fileIndex";
	public static final String FILE_DESCRIPTION = "fileDescription";
	public static final String JOB_DESCRIPTION = "jobDescription";
	public static final String ROW_PROCESSING_MODE = "rowProcessingMode";

	private static final Logger ourLog = getLogger(BulkImportPartitioner.class);

	// Job UUID is injected from the batch job parameters at step scope
	@Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
	private String myJobUUID;

	@Autowired
	private IBulkDataImportSvc myBulkDataImportSvc;

	/**
	 * Builds the partition map for the current job: one entry per input file.
	 * Each context carries the job UUID, the file index, the row processing
	 * mode, and the job/file descriptions needed by the worker step.
	 *
	 * @param gridSize ignored — the number of partitions is driven by the
	 *                 job's file count, not by the requested grid size
	 * @return map of partition name to its execution context
	 */
	@Nonnull
	@Override
	public Map<String, ExecutionContext> partition(int gridSize) {
		BulkImportJobJson job = myBulkDataImportSvc.fetchJob(myJobUUID);

		Map<String, ExecutionContext> partitionMap = new HashMap<>();
		int fileCount = job.getFileCount();
		for (int fileIndex = 0; fileIndex < fileCount; fileIndex++) {

			String description = myBulkDataImportSvc.getFileDescription(myJobUUID, fileIndex);

			ExecutionContext executionContext = new ExecutionContext();
			executionContext.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
			executionContext.putInt(FILE_INDEX, fileIndex);
			executionContext.put(ROW_PROCESSING_MODE, job.getProcessingMode());
			executionContext.put(JOB_DESCRIPTION, job.getJobDescription());
			executionContext.put(FILE_DESCRIPTION, description);

			partitionMap.put("FILE" + fileIndex + ":" + description, executionContext);
		}

		return partitionMap;
	}

}