
Commit

Remove @Transactional from JDBC DAO implementations; this should be declared at the service layer to avoid unnecessary transaction proxy creation.
msqr committed Jul 4, 2024
1 parent 4c4ff4b commit 72b94ed
Showing 2 changed files with 45 additions and 47 deletions.
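
In other words, transaction demarcation moves up to the service beans that call the DAO, so the DAO implementation no longer needs its own transaction proxy. A minimal sketch of that arrangement follows; the service class, constructor wiring, and method name are illustrative assumptions rather than code from this repository, reusing the readOnly = true, SUPPORTS settings of the annotation removed from findDatumReadingFiltered() below.

// Illustrative sketch only: ExampleDatumQueryService is a hypothetical
// service-layer bean, not a class in this repository.
// (net.solarnetwork domain imports omitted for brevity)
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

@Service
public class ExampleDatumQueryService {

	private final JdbcDatumEntityDao datumDao;

	public ExampleDatumQueryService(JdbcDatumEntityDao datumDao) {
		this.datumDao = datumDao;
	}

	// Transaction demarcation lives on the service method, so only this bean
	// is wrapped in a transaction proxy; the JDBC DAO behind it is not.
	@Transactional(readOnly = true, propagation = Propagation.SUPPORTS)
	public ObjectDatumStreamFilterResults<ReadingDatum, DatumPK> findReadings(DatumCriteria filter) {
		return datumDao.findDatumReadingFiltered(filter);
	}

}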
@@ -354,54 +354,56 @@ private Iterable<DatumExportResource> exportToResources(Configuration config) {
 					.toInstant());
 		}
 
-		try (DatumExportOutputFormatService.ExportContext exportContext = outputService
-				.createExportContext(config.getOutputConfiguration())) {
+		return doWithinOptionalTransaction(() -> {
+			try (DatumExportOutputFormatService.ExportContext exportContext = outputService
+					.createExportContext(config.getOutputConfiguration())) {
 
-			BasicBulkExportOptions options = new BasicBulkExportOptions(DATUM_EXPORT_NAME,
-					singletonMap(DatumEntityDao.EXPORT_PARAMETER_DATUM_CRITERIA, filter));
+				BasicBulkExportOptions options = new BasicBulkExportOptions(DATUM_EXPORT_NAME,
+						singletonMap(DatumEntityDao.EXPORT_PARAMETER_DATUM_CRITERIA, filter));
 
-			final QueryAuditor auditor = queryAuditor;
-			if ( auditor != null ) {
-				auditor.resetCurrentAuditResults();
-			}
+				final QueryAuditor auditor = queryAuditor;
+				if ( auditor != null ) {
+					auditor.resetCurrentAuditResults();
+				}
 
-			// all exported data will be audited on the hour we start the export at
-			GeneralNodeDatumPK auditDatumKey = new GeneralNodeDatumPK();
-			auditDatumKey.setCreated(Instant.now().truncatedTo(ChronoUnit.HOURS));
+				// all exported data will be audited on the hour we start the export at
+				GeneralNodeDatumPK auditDatumKey = new GeneralNodeDatumPK();
+				auditDatumKey.setCreated(Instant.now().truncatedTo(ChronoUnit.HOURS));
 
-			datumDao.bulkExport(new ExportCallback<GeneralNodeDatumFilterMatch>() {
+				datumDao.bulkExport(new ExportCallback<GeneralNodeDatumFilterMatch>() {
 
-				@Override
-				public void didBegin(Long totalResultCountEstimate) {
-					try {
-						exportContext
-								.start(totalResultCountEstimate != null ? totalResultCountEstimate
-										: COUNT_UNKNOWN);
-					} catch ( IOException e ) {
-						throw new DatumExportException(info.getId(), e.getMessage(), e);
-					}
-				}
+					@Override
+					public void didBegin(Long totalResultCountEstimate) {
+						try {
+							exportContext.start(
+									totalResultCountEstimate != null ? totalResultCountEstimate
+											: COUNT_UNKNOWN);
+						} catch ( IOException e ) {
+							throw new DatumExportException(info.getId(), e.getMessage(), e);
+						}
+					}
 
-				@Override
-				public ExportCallbackAction handle(GeneralNodeDatumFilterMatch d) {
-					if ( d != null && d.getId() != null && auditor != null ) {
-						auditDatumKey.setNodeId(d.getId().getNodeId());
-						auditDatumKey.setSourceId(d.getId().getSourceId());
-						auditor.addNodeDatumAuditResults(singletonMap(auditDatumKey, 1));
-					}
-					try {
-						exportContext.appendDatumMatch(singleton(d), DatumExportTask.this);
-					} catch ( IOException e ) {
-						throw new DatumExportException(info.getId(), e.getMessage(), e);
-					}
-					return ExportCallbackAction.CONTINUE;
-				}
-			}, options);
+					@Override
+					public ExportCallbackAction handle(GeneralNodeDatumFilterMatch d) {
+						if ( d != null && d.getId() != null && auditor != null ) {
+							auditDatumKey.setNodeId(d.getId().getNodeId());
+							auditDatumKey.setSourceId(d.getId().getSourceId());
+							auditor.addNodeDatumAuditResults(singletonMap(auditDatumKey, 1));
+						}
+						try {
+							exportContext.appendDatumMatch(singleton(d), DatumExportTask.this);
+						} catch ( IOException e ) {
+							throw new DatumExportException(info.getId(), e.getMessage(), e);
+						}
+						return ExportCallbackAction.CONTINUE;
+					}
+				}, options);
 
-			return exportContext.finish();
-		} catch ( IOException e ) {
-			throw new DatumExportException(info.getId(), e.getMessage(), e);
-		}
+				return exportContext.finish();
+			} catch ( IOException e ) {
+				throw new DatumExportException(info.getId(), e.getMessage(), e);
+			}
+		});
 	}
 
 	private AggregateGeneralNodeDatumFilter policyEnforcer(
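
The export body above is now handed to a doWithinOptionalTransaction() helper whose implementation is not part of this diff. A plausible shape for such a helper, assuming an optional PlatformTransactionManager collaborator and a Supplier-based task, is sketched below purely for illustration.

// Hypothetical sketch only: the real doWithinOptionalTransaction() used above
// is not shown in this commit. Assumed shape: an instance helper with an
// optional PlatformTransactionManager collaborator.
import java.util.function.Supplier;

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

public class OptionalTransactionExample {

	// assumption: may be null when no transaction support is configured
	private final PlatformTransactionManager transactionManager;

	public OptionalTransactionExample(PlatformTransactionManager transactionManager) {
		this.transactionManager = transactionManager;
	}

	protected <T> T doWithinOptionalTransaction(Supplier<T> task) {
		if ( transactionManager == null ) {
			// no transaction manager available: run the task directly
			return task.get();
		}
		// otherwise run the task within a single transaction boundary
		return new TransactionTemplate(transactionManager).execute(status -> task.get());
	}

}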
@@ -69,8 +69,6 @@
 import org.springframework.jdbc.core.ResultSetExtractor;
 import org.springframework.jdbc.core.RowMapper;
 import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
 import net.solarnetwork.central.common.dao.jdbc.CountPreparedStatementCreatorProvider;
 import net.solarnetwork.central.datum.domain.DatumReadingType;
 import net.solarnetwork.central.datum.domain.GeneralLocationDatum;
@@ -146,7 +144,7 @@
  * {@link JdbcOperations} based implementation of {@link DatumEntityDao}.
  *
  * @author matt
- * @version 2.5
+ * @version 2.6
  * @since 3.8
  */
 public class JdbcDatumEntityDao
@@ -684,7 +682,6 @@ private static RowMapper<ReadingDatum> readingMapper(Aggregation agg) {
 		}
 	}
 
-	@Transactional(readOnly = true, propagation = Propagation.SUPPORTS)
 	@Override
 	public ObjectDatumStreamFilterResults<ReadingDatum, DatumPK> findDatumReadingFiltered(
 			DatumCriteria filter) {
@@ -730,7 +727,6 @@ public ObjectDatumStreamFilterResults<ReadingDatum, DatumPK> findDatumReadingFiltered(
 
 	}
 
-	@Transactional(readOnly = true, propagation = Propagation.REQUIRED)
 	@Override
 	public ExportResult bulkExport(ExportCallback<GeneralNodeDatumFilterMatch> callback,
 			ExportOptions options) {
@@ -766,7 +762,7 @@ public ExportResult bulkExport(ExportCallback<GeneralNodeDatumFilterMatch> callback,
 		if ( combining != null ) {
 			sqlProps.put(PARAM_COMBINING, combining);
 		}
 		// get query name to execute
 		String query = getQueryForFilter(filter);
 		*/
