6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,4 +1,10 @@
# Changelog

## 2.1.3 [2026-02-03]
### Fixed
- Fixed the way the data processing context is retrieved when processing raw responses
- Changed the variable reading type from int to long

## 2.1.2 [2026-01-26]
### Fixed
- Fixed numbers for pairwise
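The 2.1.3 entry "Changed the variable reading type from int to long" is not covered by the hunks shown in this diff, so the following is only a hypothetical sketch (class and method names are invented for illustration) of the kind of fix that entry describes: widening a numeric read so large values neither overflow nor fail an Integer cast.

```java
import java.util.Map;

// Hypothetical illustration only, not code from this PR: read a numeric
// variable as long instead of int so large values survive the conversion.
public final class VariableReader {

    private VariableReader() {
    }

    static long readCount(Map<String, Object> rawVariables, String name) {
        Object value = rawVariables.get(name);
        // Number covers Integer and Long alike; longValue() widens without overflow.
        return value instanceof Number number ? number.longValue() : 0L;
    }
}
```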
2 changes: 1 addition & 1 deletion pom.xml
@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>fr.insee.genesis</groupId>
<artifactId>genesis-api</artifactId>
<version>2.1.2</version>
<version>2.1.3</version>
<packaging>jar</packaging>
<name>genesis-api</name>

@@ -256,9 +256,8 @@ public DataProcessingContextModel getContext(String interrogationId) throws Gene

@Override
public DataProcessingContextModel getContextByCollectionInstrumentId(String collectionInstrumentId){
return DataProcessingContextMapper.INSTANCE.documentToModel(
dataProcessingContextPersistancePort.findByPartitionId(collectionInstrumentId)
);
return dataProcessingContextPersistancePort.findByCollectionInstrumentId(collectionInstrumentId);
}

@Override
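The rewritten getContextByCollectionInstrumentId above returns the port call directly, which implies the persistence port now exposes a findByCollectionInstrumentId method returning the domain model, letting the mapper step be dropped. The actual interface is not part of this diff; a minimal sketch under that assumption:

```java
// Assumed shape of the persistence port after this change (only the method
// relevant to the diff is shown; the real interface likely has more methods).
public interface DataProcessingContextPersistancePort {

    // Resolves the data processing context directly by collection instrument id
    // and returns the domain model, so callers no longer map from a document.
    DataProcessingContextModel findByCollectionInstrumentId(String collectionInstrumentId);
}
```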
@@ -127,6 +127,8 @@ public DataProcessResult processRawResponses(String collectionInstrumentId, List
//Send processed ids grouped by questionnaire (if review activated)
if(dataProcessingContext != null && dataProcessingContext.isWithReview()) {
sendProcessedIdsToQualityTool(surveyUnitModels);
} else {
log.warn("Data processing context not found for collection instrument {}. Processed ids not sent to quality tool.", collectionInstrumentId);
}

//Remove processed ids from list
@@ -210,66 +210,6 @@ void getUnprocessedDataTest_processDate_present(){
//json
@Test
void processJsonRawDataTest(){
//GIVEN
lunaticJsonRawDataPersistanceStub.getMongoStub().clear();
surveyUnitPersistencePortStub.getMongoStub().clear();
surveyUnitQualityToolPerretAdapterStub.getReceivedMaps().clear();
String campaignId = "SAMPLETEST-PARADATA-V2";
String questionnaireId = campaignId + "_quest";
String interrogationId = "testinterrogationId1";
String idUE = "testIdUE1";
String varName = "AVIS_MAIL";
String varValue = "TEST";
addJsonRawDataDocumentToStub(campaignId, questionnaireId, interrogationId, idUE, null, LocalDateTime.now(),varName
, varValue);

dataProcessingContextPersistancePortStub.getMongoStub().add(
DataProcessingContextMapper.INSTANCE.modelToDocument(
DataProcessingContextModel.builder()
.partitionId(campaignId)
.kraftwerkExecutionScheduleList(new ArrayList<>())
.withReview(true)
.build()
)
);


List<String> interrogationIdList = new ArrayList<>();
interrogationIdList.add(interrogationId);

//WHEN
rawResponseController.processJsonRawData(campaignId, questionnaireId, interrogationIdList);


//THEN
//Genesis model survey unit created successfully
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub()).isNotNull().isNotEmpty().hasSize(1);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst()).isNotNull();
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCampaignId()).isEqualTo(campaignId);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCollectionInstrumentId()).isNotNull().isEqualTo(questionnaireId);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getMode()).isNotNull().isEqualTo(Mode.WEB);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getInterrogationId()).isEqualTo(interrogationId);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getUsualSurveyUnitId()).isEqualTo(idUE);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getFileDate()).isNotNull();
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getRecordDate()).isNotNull();
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCollectedVariables()).isNotNull().isNotEmpty().hasSize(1);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCollectedVariables().getFirst()).isNotNull();
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCollectedVariables().getFirst().varId()).isNotNull().isEqualTo(varName);
Assertions.assertThat(surveyUnitPersistencePortStub.getMongoStub().getFirst().getCollectedVariables().getFirst().value()).isNotNull().isEqualTo(varValue);

//Process date check
Assertions.assertThat(lunaticJsonRawDataPersistanceStub.getMongoStub().getFirst().processDate()).isNotNull();

//Perret call check
Assertions.assertThat(surveyUnitQualityToolPerretAdapterStub.getReceivedMaps())
.hasSize(1);
Assertions.assertThat(surveyUnitQualityToolPerretAdapterStub.getReceivedMaps().getFirst()).containsKey(questionnaireId);
Assertions.assertThat(surveyUnitQualityToolPerretAdapterStub.getReceivedMaps().getFirst().get(questionnaireId))
.contains(interrogationId);
}

@Test
void processJsonRawDataV2Test(){
//GIVEN
lunaticJsonRawDataPersistanceStub.getMongoStub().clear();
surveyUnitPersistencePortStub.getMongoStub().clear();
@@ -286,6 +226,7 @@ void processJsonRawDataV2Test(){
DataProcessingContextMapper.INSTANCE.modelToDocument(
DataProcessingContextModel.builder()
.partitionId(questionnaireId)
.collectionInstrumentId(questionnaireId)
.kraftwerkExecutionScheduleList(new ArrayList<>())
.withReview(true)
.build()
@@ -468,22 +468,21 @@ void convertRawData_if_collected_not_present_test() throws Exception {
void convertRawData_multipleBatchs(int rawDataSize) throws Exception {
//GIVEN
String campaignId = "SAMPLETEST-PARADATA-V1";
String questionnaireId = "TESTIDQUEST";
String questionnaireId = "SAMPLETEST-PARADATA-V1";
List<String> interrogationIdList = prepareConvertTest(rawDataSize, campaignId, questionnaireId);
//Activate review
dataProcessingContextPersistancePortStub.getMongoStub().add(
DataProcessingContextMapper.INSTANCE.modelToDocument(
DataProcessingContextModel.builder()
.partitionId(campaignId)
.collectionInstrumentId(questionnaireId)
.withReview(true)
.kraftwerkExecutionScheduleList(new ArrayList<>())
.build()
)
);

//WHEN
DataProcessResult dataProcessResult = lunaticJsonRawDataService.processRawData(campaignId, interrogationIdList,
new ArrayList<>());
DataProcessResult dataProcessResult = lunaticJsonRawDataService.processRawData(questionnaireId);

//THEN
Assertions.assertThat(dataProcessResult.dataCount()).isEqualTo(rawDataSize * 2/*EDITED*/);
@@ -499,23 +498,22 @@ void convertRawData_multipleBatchs(int rawDataSize) throws Exception {
void convertRawData_review_desactivated() throws Exception {
//GIVEN
String campaignId = "SAMPLETEST-PARADATA-V1";
String questionnaireId = "TESTIDQUEST";
String questionnaireId = "SAMPLETEST-PARADATA-V1";
List<String> interrogationIdList = prepareConvertTest(1, campaignId, questionnaireId);

//Deactivate review
dataProcessingContextPersistancePortStub.getMongoStub().add(
DataProcessingContextMapper.INSTANCE.modelToDocument(
DataProcessingContextModel.builder()
.partitionId(campaignId)
.collectionInstrumentId(questionnaireId)
.withReview(false)
.kraftwerkExecutionScheduleList(new ArrayList<>())
.build()
)
);

//WHEN
DataProcessResult dataProcessResult = lunaticJsonRawDataService.processRawData(campaignId, interrogationIdList,
new ArrayList<>());
DataProcessResult dataProcessResult = lunaticJsonRawDataService.processRawData(questionnaireId);

//THEN
Assertions.assertThat(dataProcessResult.dataCount()).isEqualTo(2);
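The updated tests above call processRawData with a single argument and read dataCount() from the result, replacing the old (campaignId, interrogationIdList, errors) form. A sketch of the call shape the tests imply, with an invented interface name and an assumed parameter name:

```java
// Hypothetical interface name; only the call shape is taken from the tests.
public interface RawDataProcessingUseCase {

    // Processes every unprocessed raw response attached to the given collection
    // instrument and reports how many data items were handled via dataCount().
    DataProcessResult processRawData(String collectionInstrumentId);
}
```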
@@ -19,6 +19,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -229,7 +230,7 @@ public Set<String> findUnprocessedInterrogationIdsByCollectionInstrumentId(Strin
lunaticJsonDataDocument -> lunaticJsonDataDocument.processDate() == null
&& lunaticJsonDataDocument.questionnaireId().equals(collectionInstrumentId)
).toList();
Set<String> interrogationIds = new HashSet<>();
Set<String> interrogationIds = new LinkedHashSet<>();
unprocessedDocuments.forEach(doc -> interrogationIds.add(doc.interrogationId()));
return interrogationIds;
}
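The stub change just above swaps HashSet for LinkedHashSet, so findUnprocessedInterrogationIdsByCollectionInstrumentId returns interrogation ids in the order the documents were scanned rather than in an unspecified hash order. A standalone illustration (not project code) of the difference:

```java
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Standalone demo: LinkedHashSet keeps insertion order, while HashSet makes
// no order guarantee, so results built from it can vary between runs.
public final class InsertionOrderDemo {

    public static void main(String[] args) {
        List<String> ids = List.of("interro-3", "interro-1", "interro-2");

        Set<String> hashed = new HashSet<>(ids);
        Set<String> linked = new LinkedHashSet<>(ids);

        System.out.println(hashed); // order depends on hash codes, may differ
        System.out.println(linked); // always [interro-3, interro-1, interro-2]
    }
}
```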