Skip to content

Commit

Permalink
Patch/2.40.2 15645 15661 (#15722)
Browse files Browse the repository at this point in the history
* fix: always flush the batch handler for CompleteDataSet import [DHIS2-15362] (2.39) (#15625) (#15710)

* fix: always flush the batch handler for CompleteDataSet import [DHIS2-15362]

* fix: test setup

* fix: controller imports use interface not impl

* Use try-with-resources to close BatchHandler (#15661) (#15704) (#15711)

* Use try-with-resources to close BatchHandler

* Rework test to deal with multi-threading

* Add try-catch to Analytics table manager

---------

Co-authored-by: Jan Bernitt <[email protected]>
  • Loading branch information
jason-p-pickering and jbee authored Nov 16, 2023
1 parent d8b8733 commit c65911f
Show file tree
Hide file tree
Showing 11 changed files with 128 additions and 177 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -177,30 +177,32 @@ private void populateTableInternal(AnalyticsTablePartition partition, String sql
List<String> columnNames =
getDimensionColumns().stream().map(AnalyticsTableColumn::getName).collect(toList());

MappingBatchHandler batchHandler =
try (MappingBatchHandler batchHandler =
MappingBatchHandler.builder()
.jdbcConfiguration(jdbcConfiguration)
.tableName(partition.getTempTableName())
.columns(columnNames)
.build();

batchHandler.init();

JdbcOwnershipWriter writer = JdbcOwnershipWriter.getInstance(batchHandler);
AtomicInteger queryRowCount = new AtomicInteger();

jdbcTemplate.query(
sql,
resultSet -> {
writer.write(getRowMap(columnNames, resultSet));
queryRowCount.getAndIncrement();
});

log.info(
"OwnershipAnalytics query row count was {} for {}",
queryRowCount,
partition.getTempTableName());
batchHandler.flush();
.build()) {
batchHandler.init();

JdbcOwnershipWriter writer = JdbcOwnershipWriter.getInstance(batchHandler);
AtomicInteger queryRowCount = new AtomicInteger();

jdbcTemplate.query(
sql,
resultSet -> {
writer.write(getRowMap(columnNames, resultSet));
queryRowCount.getAndIncrement();
});

log.info(
"OwnershipAnalytics query row count was {} for {}",
queryRowCount,
partition.getTempTableName());
batchHandler.flush();
} catch (Exception ex) {
log.error("Failed to alter table ownership: ", ex);
}
}

private String getInputSql(Program program) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
import org.hisp.dhis.common.IdSchemes;
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.scheduling.JobConfiguration;

/**
* Import/export service for {@link CompleteDataSetRegistration data set completion registrations}.
Expand Down Expand Up @@ -109,17 +108,6 @@ void writeCompleteDataSetRegistrationsJson(
*/
ImportSummary saveCompleteDataSetRegistrationsXml(InputStream in, ImportOptions importOptions);

/**
* Imports {@link CompleteDataSetRegistrations} from an XML payload.
*
* @param in the stream providing the XML payload.
* @param importOptions the options for the import.
* @param jobId the task (optional).
* @return a summary of the import process.
*/
ImportSummary saveCompleteDataSetRegistrationsXml(
InputStream in, ImportOptions importOptions, JobConfiguration jobId);

/**
* Imports {@link CompleteDataSetRegistrations} from a JSON payload.
*
Expand All @@ -129,22 +117,10 @@ ImportSummary saveCompleteDataSetRegistrationsXml(
*/
ImportSummary saveCompleteDataSetRegistrationsJson(InputStream in, ImportOptions importOptions);

/**
* Imports {@link CompleteDataSetRegistrations} from a JSON payload.
*
   * @param in the stream providing the JSON payload.
* @param importOptions the options for the import.
* @param jobId the task (optional).
* @return a summary of the import process.
*/
ImportSummary saveCompleteDataSetRegistrationsJson(
InputStream in, ImportOptions importOptions, JobConfiguration jobId);

/**
* Validates the given {@link ExportParams}.
*
* @param params the export parameters.
* @throws IllegalQueryException if validation failed.
*/
void validate(ExportParams params);
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,14 @@
package org.hisp.dhis.dxf2.dataset;

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import javax.annotation.Nonnull;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
Expand Down Expand Up @@ -76,10 +78,7 @@
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.scheduling.JobConfiguration;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.notification.NotificationLevel;
import org.hisp.dhis.system.notification.Notifier;
import org.hisp.dhis.system.util.Clock;
import org.hisp.dhis.system.util.ValidationUtils;
import org.hisp.dhis.user.CurrentUserService;
Expand All @@ -89,6 +88,7 @@
import org.hisp.quick.BatchHandlerFactory;
import org.hisp.staxwax.factory.XMLFactory;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
* @author Halvdan Hoem Grelland
Expand All @@ -113,8 +113,6 @@ public class DefaultCompleteDataSetRegistrationExchangeService

private final OrganisationUnitService orgUnitService;

private final Notifier notifier;

private final I18nManager i18nManager;

private final BatchHandlerFactory batchHandlerFactory;
Expand Down Expand Up @@ -198,6 +196,7 @@ public ExportParams paramsFromUrl(
}

@Override
@Transactional
public void writeCompleteDataSetRegistrationsXml(ExportParams params, OutputStream out) {
decideAccess(params);
validate(params);
Expand All @@ -206,6 +205,7 @@ public void writeCompleteDataSetRegistrationsXml(ExportParams params, OutputStre
}

@Override
@Transactional
public void writeCompleteDataSetRegistrationsJson(ExportParams params, OutputStream out) {
decideAccess(params);
validate(params);
Expand All @@ -214,52 +214,60 @@ public void writeCompleteDataSetRegistrationsJson(ExportParams params, OutputStr
}

@Override
@Transactional
public void writeCompleteDataSetRegistrationsJson(
Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes) {
cdsrStore.writeCompleteDataSetRegistrationsJson(lastUpdated, outputStream, idSchemes);
}

@Override
@Transactional
public ImportSummary saveCompleteDataSetRegistrationsXml(
InputStream in, ImportOptions importOptions) {
return saveCompleteDataSetRegistrationsXml(in, importOptions, null);
return saveCompleteDataSetRegistrations(importOptions, () -> readRegistrationsFromXml(in));
}

@Override
public ImportSummary saveCompleteDataSetRegistrationsXml(
InputStream in, ImportOptions importOptions, JobConfiguration jobId) {
try {
in = StreamUtils.wrapAndCheckCompressionFormat(in);
CompleteDataSetRegistrations completeDataSetRegistrations =
new StreamingXmlCompleteDataSetRegistrations(XMLFactory.getXMLReader(in));

return saveCompleteDataSetRegistrations(importOptions, jobId, completeDataSetRegistrations);
} catch (Exception ex) {
return handleImportError(jobId, ex);
}
@Nonnull
private static CompleteDataSetRegistrations readRegistrationsFromXml(InputStream in)
throws IOException {
in = StreamUtils.wrapAndCheckCompressionFormat(in);
return new StreamingXmlCompleteDataSetRegistrations(XMLFactory.getXMLReader(in));
}

@Override
@Transactional
public ImportSummary saveCompleteDataSetRegistrationsJson(
InputStream in, ImportOptions importOptions) {
return saveCompleteDataSetRegistrationsJson(in, importOptions, null);
return saveCompleteDataSetRegistrations(importOptions, () -> readRegistrationsFromJson(in));
}

@Override
public ImportSummary saveCompleteDataSetRegistrationsJson(
InputStream in, ImportOptions importOptions, JobConfiguration jobId) {
try {
in = StreamUtils.wrapAndCheckCompressionFormat(in);
private ImportSummary saveCompleteDataSetRegistrations(
ImportOptions importOptions,
Callable<CompleteDataSetRegistrations> deserializeRegistrations) {

try (BatchHandler<CompleteDataSetRegistration> batchHandler =
batchHandlerFactory.createBatchHandler(CompleteDataSetRegistrationBatchHandler.class)) {
CompleteDataSetRegistrations completeDataSetRegistrations = deserializeRegistrations.call();
ImportSummary summary =
saveCompleteDataSetRegistrations(
importOptions, completeDataSetRegistrations, batchHandler);

CompleteDataSetRegistrations completeDataSetRegistrations =
jsonMapper.readValue(in, CompleteDataSetRegistrations.class);
batchHandler.flush();

return saveCompleteDataSetRegistrations(importOptions, jobId, completeDataSetRegistrations);
return summary;
} catch (Exception ex) {
return handleImportError(jobId, ex);
log.error("Complete data set registrations could not be saved.");
return handleImportError(ex);
}
}

@Nonnull
private CompleteDataSetRegistrations readRegistrationsFromJson(InputStream in)
throws IOException {
in = StreamUtils.wrapAndCheckCompressionFormat(in);
return jsonMapper.readValue(in, CompleteDataSetRegistrations.class);
}

@Override
public void validate(ExportParams params) throws IllegalQueryException {
ErrorMessage error = null;
Expand Down Expand Up @@ -357,21 +365,17 @@ private void decideAccess(ExportParams params) throws IllegalQueryException {
}
}

private ImportSummary handleImportError(JobConfiguration jobId, Throwable ex) {
private ImportSummary handleImportError(Throwable ex) {
log.error(DebugUtils.getStackTrace(ex));
notifier.notify(jobId, NotificationLevel.ERROR, "Process failed: " + ex.getMessage(), true);
return new ImportSummary(ImportStatus.ERROR, "The import process failed: " + ex.getMessage());
}

private ImportSummary saveCompleteDataSetRegistrations(
ImportOptions importOptions,
JobConfiguration id,
CompleteDataSetRegistrations completeRegistrations) {
CompleteDataSetRegistrations completeRegistrations,
BatchHandler<CompleteDataSetRegistration> batchHandler) {
Clock clock =
new Clock(log)
.startClock()
.logTime("Starting complete data set registration import, options: " + importOptions);
notifier.clear(id).notify(id, "Process started");
new Clock(log).startClock().logTime("Starting complete data set registration import");

// Start here so we can access any outer attributes for the
// configuration
Expand Down Expand Up @@ -407,14 +411,9 @@ private ImportSummary saveCompleteDataSetRegistrations(
// Perform import
// ---------------------------------------------------------------------

notifier.notify(id, "Importing complete data set registrations");

int totalCount =
batchImport(completeRegistrations, cfg, importSummary, metaDataCallables, caches);

notifier
.notify(id, NotificationLevel.INFO, "Import done", true)
.addJobSummary(id, importSummary, ImportSummary.class);
batchImport(
completeRegistrations, cfg, importSummary, metaDataCallables, caches, batchHandler);

ImportCount count = importSummary.getImportCount();

Expand All @@ -436,16 +435,14 @@ private int batchImport(
ImportConfig config,
ImportSummary summary,
MetadataCallables mdCallables,
MetadataCaches mdCaches) {
MetadataCaches mdCaches,
BatchHandler<CompleteDataSetRegistration> batchHandler) {
final User currentUser = currentUserService.getCurrentUser();
final String currentUserName = currentUser.getUsername();
final Set<OrganisationUnit> userOrgUnits = currentUserService.getCurrentUserOrganisationUnits();
final I18n i18n = i18nManager.getI18n();

BatchHandler<CompleteDataSetRegistration> batchHandler =
batchHandlerFactory
.createBatchHandler(CompleteDataSetRegistrationBatchHandler.class)
.init();
batchHandler.init();

int importCount = 0, updateCount = 0, deleteCount = 0, totalCount = 0;

Expand Down Expand Up @@ -615,8 +612,6 @@ private int batchImport(
}
}

batchHandler.flush();

finalizeSummary(summary, totalCount, importCount, updateCount, deleteCount);

return totalCount;
Expand Down
Loading

0 comments on commit c65911f

Please sign in to comment.