apps/data-refreshments/src/refreshments-execute/sync/dataConnector/dataConnector.service.ts

80.17% Statements 186/232
63.63% Branches 14/22
60% Functions 3/5
80.17% Lines 186/232


import { HttpException, HttpStatus, Injectable, Logger } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { chunk, pick } from 'lodash-es';
import { In, Repository } from 'typeorm';
 
import {
  CustomObject,
  CustomObjectDefinition,
  DataConnectionName,
  DataConnector,
  type Company,
} from '@amalia/core/models';
import {
  DataConnectorAbstract,
  instantiateConnector,
  isDataConnectorPurgeable,
} from '@amalia/data-capture/connectors/library';
import { StringUtils } from '@amalia/ext/string';
import { assert, toError } from '@amalia/ext/typescript';
 
const ORPHAN_REMOVALS_BATCH_SIZE = 2000;
 
@Injectable()
export class DataConnectorService {
  private readonly logger = new Logger(DataConnectorService.name);
 
  public constructor(
    @InjectRepository(CustomObject, DataConnectionName)
    private readonly customObjectRepository: Repository<CustomObject>,
    @InjectRepository(CustomObjectDefinition)
    private readonly customObjectDefinitionRepository: Repository<CustomObjectDefinition>,
    @InjectRepository(DataConnector)
    private readonly dataConnectorRepository: Repository<DataConnector>,
  ) {}
 
  /**
   * Persist refreshed authentication data for connectors that rotate their tokens when connecting.
   *
   * @param dataConnector
   * @param connectorClient
   */
  public async refreshDataConnector(dataConnector: DataConnector, connectorClient: DataConnectorAbstract) {
    // Some connectors have to refresh their authentication parameters right after connecting to cycle tokens.
    if ('updateAuthentication' in connectorClient && connectorClient.updateAuthentication) {
      const newAuthenticationData = await connectorClient.updateAuthentication();

      // Only encrypt and persist the credentials if the connector actually returned new ones.
      if (newAuthenticationData) {
        const encryptedAuthenticationData = DataConnector.encryptAuth(newAuthenticationData);

        await this.dataConnectorRepository.update(
          { id: dataConnector.id },
          { encryptedAuth: encryptedAuthenticationData },
        );
      }
    }
  }
 
  /**
   * For each custom object in the database matching the given data connector object,
   * check that it still exists on the connector side; otherwise, delete it.
   *
   * @param company
   * @param dataConnector
   * @param objectName
   * @param checkIsOngoing
   * @param isTimeoutReachedFn checks whether the execution time allowed by the cloud provider will be exceeded soon
   * @param offsetForPrune if this prune continues a previous one, we resume from where the previous one stopped
   */
  public async pruneObjectRecords(
    company: Company,
    dataConnector: DataConnector,
    objectName: string,
    checkIsOngoing: () => Promise<void>,
    isTimeoutReachedFn: () => boolean,
    offsetForPrune: number = 0,
  ): Promise<{
    deletedRecordIds: string[];
    offsetForNextPruneIteration: number;
  }> {
    const connectorInstance = instantiateConnector(dataConnector);
 
    // If the connector does not implement DataConnectorPurgeableInterface, there is no need to purge.
    if (!isDataConnectorPurgeable(connectorInstance)) {
      return { deletedRecordIds: [], offsetForNextPruneIteration: 0 };
    }
 
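    // Custom object definitions are keyed by the camelCased machine name of the connector object.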
    const customObjectDefinitionName = StringUtils.camelCase(objectName);
    const customObjectDefinition = await this.customObjectDefinitionRepository.findOneBy({
      company: { id: company.id },
      machineName: customObjectDefinitionName,
    });
 
    if (!customObjectDefinition) {
      this.logger.error({
        message: `Definition ${customObjectDefinitionName} not found`,
        company: pick(company, ['id', 'name']),
      });
      throw new Error(`Definition ${customObjectDefinitionName} not found`);
    }
 
    assert(dataConnector.source?.objects, 'Source or objects not found');
 
    // Find the source object matching the requested name or alias; the orphan removals below are paginated.
    const object = dataConnector.source.objects.find((o) => o.alias === objectName || o.name === objectName);
 
    assert(object, `Object ${objectName} not found`);
 
    // Skip the items already processed if this is not the first run for this refreshment.
    let skip = offsetForPrune;
 
    const deletedRecordIds: string[] = [];
 
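    // Total number of records for this definition, used for the empty check and to log progress.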
    const count = await this.customObjectRepository.count({
      where: {
        companyId: company.id,
        definitionId: customObjectDefinition.id,
      },
    });
 
    // If there are no records, there is nothing to prune.
    if (count === 0) {
      return { deletedRecordIds: [], offsetForNextPruneIteration: 0 };
    }
 
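    // Fetch the external ids still stored locally (ordered, starting from the saved offset) so they
    // can be checked against what still exists on the connector side.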
    const allExternalIdsForCustomObject = (
      await this.customObjectRepository
        .createQueryBuilder('co')
        .select('co.externalId')
        .where('co.companyId = :companyId', { companyId: company.id })
        .andWhere('co.definitionId = :definitionId', { definitionId: customObjectDefinition.id })
        .skip(skip)
        .orderBy('co.externalId', 'ASC')
        .getRawMany<{ co_externalId: string }>()
    ).map((customObject) => customObject.co_externalId);
 
    // Instead of chunking by 2000 on the database side, chunk in memory with lodash to avoid hammering the database.
    const chunks = chunk(allExternalIdsForCustomObject, ORPHAN_REMOVALS_BATCH_SIZE);
    let currentChunkIndex = 0;
 
    while (currentChunkIndex < chunks.length && !isTimeoutReachedFn()) {
      await checkIsOngoing();
      const currentChunkOfExternalIds = chunks[currentChunkIndex];
 
      this.logger.log({
        message: `[${objectName}] Loop on existing ${objectName} to search orphans. ${currentChunkIndex * ORPHAN_REMOVALS_BATCH_SIZE}/${count}`,
        company: pick(company, ['id', 'name']),
      });
 
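      // Ask the connector which of these external ids no longer exist on its side.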
      const orphanRecordIds = await connectorInstance.getOrphanRecordIds(object, currentChunkOfExternalIds);
 
      deletedRecordIds.push(...orphanRecordIds);
      skip += currentChunkOfExternalIds.length;
      currentChunkIndex += 1;
    }
 
    // Compute the next offset:
    // if the timeout was not reached, it is 0, meaning everything has been processed;
    // otherwise, it is the current offset minus the number of deleted records.
    const nextOffset = isTimeoutReachedFn() ? skip - deletedRecordIds.length : 0;
 
    if (deletedRecordIds.length === 0) {
      return { deletedRecordIds: [], offsetForNextPruneIteration: nextOffset };
    }
 
    this.logger.log({
      message: `[${objectName}] Found ${deletedRecordIds.length} orphans ${objectName} to delete`,
      company: pick(company, ['id', 'name']),
    });
 
    try {
      // Delete the custom objects whose external id is in the deletion list.
      // No transaction: we want to delete everything we can.
      await this.bulkDeleteByExternalId(company.id, customObjectDefinition.id, deletedRecordIds);
    } catch (e) {
      const error = toError(e);

      this.logger.error({
        message: `Error when pruning objects ${objectName} of connector ${dataConnector.type}`,
        error,
        company: pick(company, ['id', 'name']),
      });

      throw new HttpException(
        `Error when pruning objects ${objectName} of connector ${dataConnector.type}.`,
        HttpStatus.INTERNAL_SERVER_ERROR,
      );
    }
 
    if (isTimeoutReachedFn()) {
      this.logger.log({
        message: `[${objectName}] partially pruned, the timeout was reached, ${deletedRecordIds.length} objects deleted`,
        company: pick(company, ['id', 'name']),
      });
    } else {
      this.logger.log({ message: `[${objectName}] pruned`, company: pick(company, ['id', 'name']) });
    }
 
    return { deletedRecordIds, offsetForNextPruneIteration: nextOffset };
  }
 
  /**
   * Delete all records of a custom object.
   * @param companyId
   * @param definitionId
   */
  public async purgeObjectRecords(companyId: string, definitionId: string): Promise<number> {
    const { affected } = await this.customObjectRepository.delete({
      companyId,
      definitionId,
    });

    return affected || 0;
  }
 
  /**
   * Bulk delete custom objects by externalIds.
   * @param companyId
   * @param definitionId
   * @param externalIds
   */
  public async bulkDeleteByExternalId(companyId: string, definitionId: string, externalIds: string[]) {
    if (externalIds.length === 0) {
      return;
    }
 
    // Delete custom objects with companyId, definitionId and externalId in the list by chunks of 1000.
    await Promise.all(
      chunk(externalIds, 1000).map((externalIdChunk) =>
        this.customObjectRepository.delete({
          companyId,
          definitionId,
          externalId: In(externalIdChunk),
        }),
      ),
    );
  }
}
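
For reference, a minimal sketch of how a caller could chain prune runs across invocations; the runPruneStep helper, the deadline value, and the offset persistence are assumptions for illustration only (they are not part of this service), and the sketch reuses the types imported above.

// Hypothetical caller (illustration only, not part of this service): each refreshment
// run resumes pruning from the offset returned by the previous run.
async function runPruneStep(
  service: DataConnectorService,
  company: Company,
  dataConnector: DataConnector,
  objectName: string,
  previousOffset: number,
): Promise<number> {
  // Assumed budget: stop shortly before the cloud provider's execution limit.
  const deadline = Date.now() + 50_000;

  const { deletedRecordIds, offsetForNextPruneIteration } = await service.pruneObjectRecords(
    company,
    dataConnector,
    objectName,
    async () => {}, // checkIsOngoing: no-op in this sketch
    () => Date.now() > deadline, // isTimeoutReachedFn
    previousOffset,
  );

  console.log(`Pruned ${deletedRecordIds.length} orphan records of ${objectName}`);

  // Persist the returned offset so the next run continues where this one stopped
  // (0 means the prune completed).
  return offsetForNextPruneIteration;
}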