MODINV-1115 Assign permissions to System User #789

Merged 14 commits on Dec 23, 2024
Changes from 13 commits

NEWS.md (2 additions, 0 deletions)
@@ -1,6 +1,8 @@
## 21.1.0-SNAPSHOT 2024-xx-xx
* Provide consistent handling of two or more concurrent Marc Bib Update events for the same bib record [MODINV-1100](https://folio-org.atlassian.net/browse/MODINV-1100)
* Enable system user for data-import processes [MODINV-1115](https://folio-org.atlassian.net/browse/MODINV-1115)
* Missing x-okapi-user-id header in communications with inventory-storage [MODINV-1134](https://folio-org.atlassian.net/browse/MODINV-1134)

## 21.0.0 2024-10-29
* Existing "035" field is not retained the original position in imported record [MODINV-1049](https://folio-org.atlassian.net/browse/MODINV-1049)
* Update Data Import logic to normalize OCLC 035 values [MODINV-949](https://folio-org.atlassian.net/browse/MODINV-949)

--------------------------------------------------------------------
descriptors/ModuleDescriptor-template.json (39 additions, 0 deletions)

@@ -915,6 +915,45 @@
]
}
],
"metadata": {
"user": {
"type": "system",
"permissions": [
"user-tenants.collection.get",
"mapping-metadata.item.get",
"mapping-metadata.type.item.get",
"converter-storage.jobprofilesnapshots.get",
"source-storage.records.put",
"source-storage.records.post",
"source-storage.records.item.get",
"source-storage.records.formatted.item.get",
"source-storage.records.generation.item.put",
"source-storage.records.matching.collection.post",
"inventory-storage.items.item.post",
"inventory-storage.items.item.put",
"inventory-storage.items.item.get",
"inventory-storage.items.collection.get",
"inventory-storage.material-types.item.get",
"inventory-storage.material-types.collection.get",
"inventory-storage.loan-types.item.get",
"inventory-storage.loan-types.collection.get",
"inventory-storage.locations.item.get",
"inventory-storage.locations.collection.get",
"inventory-storage.holdings.item.get",
"inventory-storage.holdings.item.post",
"inventory-storage.holdings.item.put",
"inventory-storage.holdings.collection.get",
"inventory-storage.instances.item.get",
"inventory-storage.instances.item.put",
"inventory-storage.instances.item.post",
"inventory-storage.instances.collection.get",
"inventory-storage.preceding-succeeding-titles.item.post",
"inventory-storage.preceding-succeeding-titles.collection.get",
"inventory-storage.preceding-succeeding-titles.item.put",
"inventory-storage.preceding-succeeding-titles.item.delete"
]
}
},
"launchDescriptor": {
"dockerImage": "${artifactId}:${version}",
"dockerPull": false,
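
The metadata.user block above declares a system user of type "system" together with the permission set it needs when data-import work runs under that user. To make concrete what those grants cover, here is a minimal, self-contained sketch of the kind of downstream call made on the system user's behalf; the URL, tenant, and credential values are placeholders, mod-inventory uses its own HTTP client rather than java.net.http, and the only point is the Okapi headers, including the X-Okapi-User-Id header referenced by the MODINV-1134 changelog entry.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SystemUserCallSketch {
  public static void main(String[] args) throws Exception {
    // GET /item-storage/items is the kind of request covered by
    // "inventory-storage.items.collection.get" above; all values are placeholders.
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://okapi:9130/item-storage/items?limit=1"))
        .header("X-Okapi-Tenant", "diku")                   // placeholder tenant
        .header("X-Okapi-Token", "<system-user-token>")     // placeholder token
        .header("X-Okapi-User-Id", "<system-user-uuid>")    // the header this PR propagates
        .GET()
        .build();

    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode());
  }
}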

--------------------------------------------------------------------
pom.xml (1 addition, 1 deletion)

@@ -321,7 +321,7 @@
<liquibase.version>4.9.1</liquibase.version>
<kafkaclients.version>3.1.0</kafkaclients.version>
<junit.version>4.13.2</junit.version>
<data-import-processing-core.version>4.3.1</data-import-processing-core.version>
<data-import-processing-core.version>4.4.0-SNAPSHOT</data-import-processing-core.version>
<folio-module-descriptor-validator.version>1.0.0</folio-module-descriptor-validator.version>
</properties>

--------------------------------------------------------------------

@@ -28,6 +28,7 @@
import org.folio.MappingProfile;
import org.folio.inventory.common.Context;
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.domain.AuthorityRecordCollection;
import org.folio.inventory.storage.Storage;
import org.folio.inventory.validation.exceptions.JsonMappingException;
@@ -78,7 +79,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload p

prepareEvent(payload);

var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl(), payload.getContext().get(EventHandlingUtil.USER_ID));
var jobExecutionId = payload.getJobExecutionId();
mappingMetadataCache.get(jobExecutionId, context)
.map(mapMetadataOrFail())
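
This hunk, and every handler hunk below it, applies the same change: EventHandlingUtil.constructContext now takes a fourth argument, the user id read from the event payload context under EventHandlingUtil.USER_ID. The utility itself is not part of this diff, so the following is only a self-contained sketch of that pattern with stand-in types and an assumed key value, not the actual mod-inventory implementation.

import java.util.Map;

final class ConstructContextSketch {
  // Key under which the user id travels in the DI event payload context;
  // the real constant is EventHandlingUtil.USER_ID and its value is not shown in this diff.
  static final String USER_ID = "USER_ID";

  // Stand-in for org.folio.inventory.common.Context, which in this diff exposes at least
  // getToken() and getOkapiLocation(); modeled here as a simple record.
  record Context(String tenantId, String token, String okapiLocation, String userId) {}

  // New four-argument form used at every call site in this PR.
  static Context constructContext(String tenantId, String token, String okapiUrl, String userId) {
    return new Context(tenantId, token, okapiUrl, userId);
  }

  // Mirrors the call sites: tenant, token, and URL come from the payload itself,
  // the user id from its context map.
  static Context fromPayload(String tenant, String token, String okapiUrl,
                             Map<String, String> payloadContext) {
    return constructContext(tenant, token, okapiUrl, payloadContext.get(USER_ID));
  }
}

Presumably the userId carried in the Context is what later populates the x-okapi-user-id header on calls to inventory-storage (see the MODINV-1134 entry in NEWS.md).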

--------------------------------------------------------------------

@@ -16,6 +16,7 @@
import org.folio.inventory.consortium.services.ConsortiumService;
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.entities.PartialError;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.dataimport.services.OrderHelperService;
import org.folio.inventory.consortium.util.ConsortiumUtil;
import org.folio.inventory.dataimport.util.ParsedRecordUtil;
@@ -96,7 +97,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
return CompletableFuture.failedFuture(new EventProcessingException(ACTION_HAS_NO_MAPPING_MSG));
}

Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
String jobExecutionId = dataImportEventPayload.getJobExecutionId();
String recordId = payloadContext.get(RECORD_ID_HEADER);
String chunkId = payloadContext.get(CHUNK_ID_HEADER);

--------------------------------------------------------------------

@@ -94,7 +94,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
return CompletableFuture.failedFuture(new EventProcessingException(format(ACTION_HAS_NO_MAPPING_MSG, jobExecutionId, recordId)));
}

Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
Record targetRecord = Json.decodeValue(payloadContext.get(EntityType.MARC_BIBLIOGRAPHIC.value()), Record.class);
var sourceContent = targetRecord.getParsedRecord().getContent().toString();

--------------------------------------------------------------------

@@ -134,7 +134,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
Future<RecordToEntity> recordToItemFuture = idStorageService.store(recordId, UUID.randomUUID().toString(), dataImportEventPayload.getTenant());
recordToItemFuture.onSuccess(res -> {
String deduplicationItemId = res.getEntityId();
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
ItemCollection itemCollection = storage.getItemCollection(context);

mappingMetadataCache.get(jobExecutionId, context)

--------------------------------------------------------------------

@@ -28,6 +28,7 @@
import org.folio.MappingMetadataDto;
import org.folio.inventory.common.Context;
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.domain.HoldingsRecordCollection;
import org.folio.inventory.domain.relationship.RecordToEntity;
import org.folio.inventory.services.HoldingsCollectionService;
@@ -99,7 +100,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
return CompletableFuture.failedFuture(new EventProcessingException(ACTION_HAS_NO_MAPPING_MSG));
}

Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
Record targetRecord = new JsonObject(payloadContext.get(EntityType.MARC_HOLDINGS.value())).mapTo(Record.class);
prepareEvent(dataImportEventPayload);

--------------------------------------------------------------------

@@ -10,6 +10,7 @@
import org.folio.Authority;
import org.folio.DataImportEventPayload;
import org.folio.inventory.dataimport.exceptions.DataImportException;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.domain.AuthorityRecordCollection;
import org.folio.inventory.storage.Storage;
import org.folio.processing.events.services.handler.EventHandler;
@@ -54,7 +55,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload p
throw new EventProcessingException(UNEXPECTED_PAYLOAD_MSG);
}

var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl(), payload.getContext().get(EventHandlingUtil.USER_ID));
AuthorityRecordCollection authorityRecordCollection = storage.getAuthorityRecordCollection(context);
String id = payload.getContext().get(AUTHORITY_RECORD_ID);
LOGGER.info("Delete authority with id: {}", id);

--------------------------------------------------------------------

@@ -79,8 +79,8 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
}

record.setExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId));
Context localTenantContext = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context targetInstanceContext = EventHandlingUtil.constructContext(getTenant(dataImportEventPayload), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context localTenantContext = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
Context targetInstanceContext = EventHandlingUtil.constructContext(getTenant(dataImportEventPayload), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
Promise<Instance> instanceUpdatePromise = Promise.promise();

mappingMetadataCache.get(dataImportEventPayload.getJobExecutionId(), localTenantContext)

--------------------------------------------------------------------

@@ -117,7 +117,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
LOGGER.info("handle:: Processing ReplaceInstanceEventHandler starting with jobExecutionId: {} and incomingRecordId: {}.",
dataImportEventPayload.getJobExecutionId(), payloadContext.get(INCOMING_RECORD_ID));

Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
Instance instanceToUpdate = Instance.fromJson(new JsonObject(dataImportEventPayload.getContext().get(INSTANCE.value())));

if (instanceToUpdate.getSource() != null && instanceToUpdate.getSource().equals(LINKED_DATA.getValue())) {
@@ -132,7 +132,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
.compose(consortiumConfigurationOptional -> {
if (consortiumConfigurationOptional.isPresent()) {
String centralTenantId = consortiumConfigurationOptional.get().getCentralTenantId();
Context centralTenantContext = EventHandlingUtil.constructContext(centralTenantId, context.getToken(), context.getOkapiLocation());
Context centralTenantContext = EventHandlingUtil.constructContext(centralTenantId, context.getToken(), context.getOkapiLocation(), payloadContext.get(EventHandlingUtil.USER_ID));
InstanceCollection instanceCollection = storage.getInstanceCollection(centralTenantContext);
InstanceUtil.findInstanceById(instanceToUpdate.getId(), instanceCollection)
.onSuccess(existedCentralTenantInstance -> {
@@ -154,7 +154,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
});
} else {
String targetInstanceTenantId = dataImportEventPayload.getContext().getOrDefault(CENTRAL_TENANT_ID, dataImportEventPayload.getTenant());
Context instanceUpdateContext = EventHandlingUtil.constructContext(targetInstanceTenantId, dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context instanceUpdateContext = EventHandlingUtil.constructContext(targetInstanceTenantId, dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
InstanceCollection instanceCollection = storage.getInstanceCollection(instanceUpdateContext);

InstanceUtil.findInstanceById(instanceToUpdate.getId(), instanceCollection)
@@ -262,7 +262,7 @@ private Future<Snapshot> copySnapshotToOtherTenant(String snapshotId, DataImport
.withStatus(Snapshot.Status.COMMITTED)
.withProcessingStartedDate(new Date());

var context = EventHandlingUtil.constructContext(tenantId, dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
var context = EventHandlingUtil.constructContext(tenantId, dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), dataImportEventPayload.getContext().get(EventHandlingUtil.USER_ID));
return postSnapshotInSrsAndHandleResponse(context, snapshot);
}

--------------------------------------------------------------------

@@ -19,6 +19,7 @@
import org.folio.inventory.common.api.request.PagingParameters;
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.entities.PartialError;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.domain.HoldingsRecordCollection;
import org.folio.inventory.dataimport.entities.OlHoldingsAccumulativeResults;
import org.folio.inventory.domain.items.ItemCollection;
@@ -113,7 +114,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
future.complete(dataImportEventPayload);
return future;
}
Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), dataImportEventPayload.getContext().get(EventHandlingUtil.USER_ID));
String jobExecutionId = dataImportEventPayload.getJobExecutionId();
String recordId = dataImportEventPayload.getContext().get(RECORD_ID_HEADER);
String chunkId = dataImportEventPayload.getContext().get(CHUNK_ID_HEADER);

--------------------------------------------------------------------

@@ -131,7 +131,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload d
}
LOGGER.info("handle:: Processing UpdateItemEventHandler starting with jobExecutionId: {}, incomingRecordId: {}.",
dataImportEventPayload.getJobExecutionId(), dataImportEventPayload.getContext().get(INCOMING_RECORD_ID));
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
String jobExecutionId = dataImportEventPayload.getJobExecutionId();
String recordId = dataImportEventPayload.getContext().get(RECORD_ID_HEADER);
String chunkId = dataImportEventPayload.getContext().get(CHUNK_ID_HEADER);
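
Two retrieval styles appear across these hunks: payloadContext.get(EventHandlingUtil.USER_ID) and dataImportEventPayload.getContext().get(EventHandlingUtil.USER_ID). Assuming payloadContext is the local variable these handlers conventionally assign from dataImportEventPayload.getContext(), both read the same map, so the choice is stylistic. The sketch below, with illustrative class and key names only, shows how a producer would seed that value once so every handler can pick it up.

import java.util.HashMap;

final class PayloadSeedingSketch {
  static final String USER_ID = "USER_ID"; // assumed to match EventHandlingUtil.USER_ID

  // Minimal stand-in for DataImportEventPayload: only the context map matters here.
  static final class Payload {
    private final HashMap<String, String> context = new HashMap<>();
    HashMap<String, String> getContext() { return context; }
  }

  public static void main(String[] args) {
    Payload payload = new Payload();
    // Whoever builds the DI event payload puts the acting user's id into the context once...
    payload.getContext().put(USER_ID, "11111111-2222-3333-4444-555555555555");

    // ...and both retrieval styles seen in the diff then yield the same value.
    HashMap<String, String> payloadContext = payload.getContext();
    System.out.println(payloadContext.get(USER_ID));
    System.out.println(payload.getContext().get(USER_ID));
  }
}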

--------------------------------------------------------------------

@@ -42,6 +42,7 @@
import org.folio.inventory.common.domain.Failure;
import org.folio.inventory.dataimport.cache.MappingMetadataCache;
import org.folio.inventory.dataimport.exceptions.DataImportException;
import org.folio.inventory.dataimport.handlers.matching.util.EventHandlingUtil;
import org.folio.inventory.domain.HoldingsRecordCollection;
import org.folio.inventory.domain.instances.InstanceCollection;
import org.folio.inventory.storage.Storage;
@@ -103,7 +104,7 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload p

prepareEvent(payload);

var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
var context = constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl(), payload.getContext().get(EventHandlingUtil.USER_ID));
var jobExecutionId = payload.getJobExecutionId();
LOGGER.info("Update marc holding with jobExecutionId: {}, incomingRecordId: {}",
jobExecutionId, payload.getContext().get(INCOMING_RECORD_ID));

--------------------------------------------------------------------

@@ -80,14 +80,14 @@ public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload p
return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
}
LOGGER.info("handle:: Processing {} modifying starting with jobExecutionId: {}.", modifiedEntityType(), payload.getJobExecutionId());
Context localTenantContext = EventHandlingUtil.constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
Context localTenantContext = EventHandlingUtil.constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));

mappingMetadataCache.get(payload.getJobExecutionId(), localTenantContext)
.map(mapMappingMetaDataOrFail(format(MAPPING_PARAMETERS_NOT_FOUND_MSG, payload.getJobExecutionId())))
.compose(mappingMetadataDto -> modifyRecord(payload, getMappingParameters(mappingMetadataDto)).map(mappingMetadataDto))
.compose(mappingMetadataDto -> {
if (payloadContext.containsKey(relatedEntityType().value())) {
Context targetInstanceContext = EventHandlingUtil.constructContext(getTenant(payload), payload.getToken(), payload.getOkapiUrl());
Context targetInstanceContext = EventHandlingUtil.constructContext(getTenant(payload), payload.getToken(), payload.getOkapiUrl(), payloadContext.get(EventHandlingUtil.USER_ID));
return updateRelatedEntity(payload, mappingMetadataDto, targetInstanceContext)
.compose(v -> updateRecord(getRecord(payload.getContext()), targetInstanceContext));
}

--------------------------------------------------------------------

@@ -323,7 +323,7 @@ private void populatePayloadWithExternalIdentifiers(RecordsIdentifiersCollection

private Future<List<Record>> matchCentralTenantIfNeededAndCombineWithLocalMatchedRecords(RecordMatchingDto recordMatchingDto, DataImportEventPayload payload,
Optional<Record> localMatchedRecord) {
Context context = EventHandlingUtil.constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
Context context = EventHandlingUtil.constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl(), payload.getContext().get(EventHandlingUtil.USER_ID));
return consortiumService.getConsortiumConfiguration(context)
.compose(consortiumConfigurationOptional -> {
if (consortiumConfigurationOptional.isPresent() && !consortiumConfigurationOptional.get().getCentralTenantId().equals(payload.getTenant())) {