diff --git a/api/apps/api/src/modules/geo-features/import/features-amounts-upload.service.ts b/api/apps/api/src/modules/geo-features/import/features-amounts-upload.service.ts
index e99665c53d..dc58a9a09a 100644
--- a/api/apps/api/src/modules/geo-features/import/features-amounts-upload.service.ts
+++ b/api/apps/api/src/modules/geo-features/import/features-amounts-upload.service.ts
@@ -51,6 +51,9 @@ export class FeatureAmountUploadService {
 
     let newFeaturesFromCsvUpload;
     try {
+      this.logger.log(
+        `Starting process of parsing csv file and saving amount data to temporary storage`,
+      );
       await this.events.createEvent(data);
       // saving feature data to temporary table
       const featuresRegistry = await this.saveCsvToRegistry(
@@ -63,13 +66,17 @@ export class FeatureAmountUploadService {
       }
       // Saving new features to apiDB 'features' table
 
+      this.logger.log(`Saving new features to (apiDB).features table...`);
       newFeaturesFromCsvUpload = await this.saveNewFeaturesFromCsvUpload(
         apiQueryRunner,
         featuresRegistry.right.id,
         data.projectId,
       );
-
+      this.logger.log(`New features saved in (apiDB).features table`);
       // Saving new features amounts and geoms to geoDB 'features_amounts' table
+      this.logger.log(
+        `Starting the process of saving new features amounts and geoms to (geoDB).features_amounts table`,
+      );
       await this.saveNewFeaturesAmountsFromCsvUpload(
         newFeaturesFromCsvUpload,
         apiQueryRunner,
@@ -79,18 +86,25 @@ export class FeatureAmountUploadService {
       );
 
       // Removing temporary data from apiDB uploads tables
+      this.logger.log(
+        `Removing data from temporary tables after successful upload...`,
+      );
       await apiQueryRunner.manager.delete(FeatureAmountUploadRegistry, {
         id: featuresRegistry.right.id,
       });
 
+      this.logger.log(
+        `Upload temporary data removed from apiDB uploads tables`,
+      );
       // Setting project source to legacy-import to create puvspr.dat files from pre-calculated amounts, to allow to use new features after upload
-
+      this.logger.log(`Updating project sources value to legacy-import...`);
       await this.updateProjectSources(
         data.projectId,
         ProjectSourcesEnum.legacyImport,
         apiQueryRunner.manager,
       );
 
+      this.logger.log(`Csv file upload process finished successfully`);
       // Committing transaction
 
       await apiQueryRunner.commitTransaction();
@@ -122,9 +136,14 @@ export class FeatureAmountUploadService {
     queryRunner: QueryRunner,
   ): Promise<any> {
     try {
+      this.logger.log(`Parsing csv file...`);
+
       const parsedFile = await featureAmountCsvParser(data.fileBuffer);
 
       const { featureNames, puids } = this.getFeatureNamesAndPuids(parsedFile);
+
+      this.logger.log(`Validating parsed csv file...`);
+
       if (
         await this.areFeatureNamesNotAlreadyUsedInProject(
           data.projectId,
@@ -138,6 +157,7 @@ export class FeatureAmountUploadService {
         return left(unknownPuidsInFeatureAmountCsvUpload);
       }
 
+      this.logger.log(`Saving parsed data to temporary storage...`);
       const importedRegistry = await this.saveFeaturesToRegistry(
         parsedFile,
         data.projectId,
@@ -163,13 +183,16 @@ export class FeatureAmountUploadService {
       features,
       CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
     );
+
+    this.logger.log(`Saving new upload data to temporary table...`);
     const newUpload = await entityManager
       .getRepository(FeatureAmountUploadRegistry)
       .save({
         projectId,
         userId,
       });
-    for (const chunk of featuresChunks) {
+    for (const [index, chunk] of featuresChunks.entries()) {
+      this.logger.log(`Inserting chunk ${index} to temporary table...`);
       await entityManager
         .createQueryBuilder()
         .insert()
@@ -177,6 +200,7 @@ export class FeatureAmountUploadService {
         .values(chunk.map((feature) => ({ ...feature, upload: newUpload })))
         .execute();
     }
+    this.logger.log(`New csv upload data saved to temporary tables`);
     return newUpload;
   }
 
@@ -219,7 +243,12 @@ export class FeatureAmountUploadService {
     uploadId: string,
     projectId: string,
   ) {
-    for (const newFeature of newFeaturesFromCsvUpload) {
+    for (const [index, newFeature] of newFeaturesFromCsvUpload.entries()) {
+      this.logger.log(
+        `Getting feature amounts for feature number ${index + 1}: ${
+          newFeature.feature_class_name
+        }`,
+      );
       const featureAmounts = await apiQueryRunner.manager
         .createQueryBuilder()
         .select(['fa.puid', 'fa.amount'])
@@ -235,9 +264,18 @@ export class FeatureAmountUploadService {
         CHUNK_SIZE_FOR_BATCH_APIDB_OPERATIONS,
       );
 
-      for (const featureChunk of featuresChunks) {
+      this.logger.log(
+        `Feature data divided into ${featuresChunks.length} chunks`,
+      );
+      for (const [amountIndex, featureChunk] of featuresChunks.entries()) {
+        this.logger.log(
+          `Starting the process of saving chunk with index ${amountIndex} of amounts of feature ${newFeature.feature_class_name}...`,
+        );
         const firstParameterNumber = 2;
         const parameters: any[] = [projectId];
+        this.logger.log(
+          `Generating values to insert for chunk with index ${amountIndex}...`,
+        );
         const valuesToInsert = featureChunk.map((featureAmount, index) => {
           parameters.push(
             ...[
@@ -260,9 +298,13 @@ export class FeatureAmountUploadService {
             )
             `;
         });
+
+        this.logger.log(
+          `Inserting amount values of chunk with index ${amountIndex} into (geoDB).features_data table...`,
+        );
         await geoQueryRunner.manager.query(
           `
-           WITH project_pus AS (
+           WITH project_pus AS NOT MATERIALIZED (
                 SELECT ppu.id, ppu.puid, pug.the_geom FROM projects_pu ppu JOIN planning_units_geom pug ON pug.id = ppu.geom_id WHERE ppu.project_id = $1
             )
             INSERT INTO features_data (the_geom, feature_id, amount, project_pu_id)
@@ -272,8 +314,15 @@ export class FeatureAmountUploadService {
           `,
           parameters,
         );
+        this.logger.log(
+          `Chunk with index ${amountIndex} saved to (geoDB).features_data`,
+        );
       }
+      this.logger.log(
+        `All chunks of feature ${newFeature.feature_class_name} saved`,
+      );
     }
+    this.logger.log(`All new features data saved to (geoDB).features_data`);
   }
 
   private async areFeatureNamesNotAlreadyUsedInProject(