Skip to content

Commit

Permalink
Merge branch 'main' into rpawlaszek/relational-components
Browse files Browse the repository at this point in the history
  • Loading branch information
Rafał Pawłaszek committed Jan 3, 2024
2 parents 654d559 + cd02898 commit 0000bc3
Show file tree
Hide file tree
Showing 10 changed files with 1,157 additions and 414 deletions.
13 changes: 13 additions & 0 deletions API.md

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 8 additions & 8 deletions package.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 2 additions & 14 deletions src/salesforce/destination.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ SPDX-License-Identifier: Apache-2.0
import { CfnFlow } from 'aws-cdk-lib/aws-appflow';
import { IConstruct } from 'constructs';
import { SalesforceConnectorProfile } from './profile';
import { SalesforceDataTransferApi } from './salesforce-data-transfer-api';
import { SalesforceConnectorType } from './type';
import { AppFlowPermissionsManager } from '../core/appflow-permissions-manager';
import { ConnectorType } from '../core/connectors/connector-type';
Expand All @@ -13,19 +14,6 @@ import { IFlow } from '../core/flows';
import { IDestination } from '../core/vertices/destination';
import { WriteOperation } from '../core/write-operation';

/**
* The default. Amazon AppFlow selects which API to use based on the number of records that your flow transfers to Salesforce. If your flow transfers fewer than 1,000 records, Amazon AppFlow uses Salesforce REST API. If your flow transfers 1,000 records or more, Amazon AppFlow uses Salesforce Bulk API 2.0.
*
* Each of these Salesforce APIs structures data differently. If Amazon AppFlow selects the API automatically, be aware that, for recurring flows, the data output might vary from one flow run to the next. For example, if a flow runs daily, it might use REST API on one day to transfer 900 records, and it might use Bulk API 2.0 on the next day to transfer 1,100 records. For each of these flow runs, the respective Salesforce API formats the data differently. Some of the differences include how dates are formatted and null values are represented. Also, Bulk API 2.0 doesn't transfer Salesforce compound fields.
*
* By choosing this option, you optimize flow performance for both small and large data transfers, but the tradeoff is inconsistent formatting in the output.
*/
export enum SalesforceDataTransferApi {
AUTOMATIC = 'AUTOMATIC',
BULKV2 = 'BULKV2',
REST_SYNC = 'REST_SYNC'
}

export interface SalesforceDestinationProps {

readonly profile: SalesforceConnectorProfile;
Expand Down Expand Up @@ -89,4 +77,4 @@ export class SalesforceDestination implements IDestination {
}
}

}
}
3 changes: 2 additions & 1 deletion src/salesforce/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,5 @@ SPDX-License-Identifier: Apache-2.0
export * from './type';
export * from './profile';
export * from './source';
export * from './destination';
export * from './destination';
export * from './salesforce-data-transfer-api';
13 changes: 13 additions & 0 deletions src/salesforce/salesforce-data-transfer-api.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
/**
 * The Salesforce API that Amazon AppFlow uses when transferring data to or from Salesforce.
 */
export enum SalesforceDataTransferApi {

  /**
   * The default. Amazon AppFlow selects which API to use based on the number of records that your flow transfers to Salesforce. If your flow transfers fewer than 1,000 records, Amazon AppFlow uses Salesforce REST API. If your flow transfers 1,000 records or more, Amazon AppFlow uses Salesforce Bulk API 2.0.
   *
   * Each of these Salesforce APIs structures data differently. If Amazon AppFlow selects the API automatically, be aware that, for recurring flows, the data output might vary from one flow run to the next. For example, if a flow runs daily, it might use REST API on one day to transfer 900 records, and it might use Bulk API 2.0 on the next day to transfer 1,100 records. For each of these flow runs, the respective Salesforce API formats the data differently. Some of the differences include how dates are formatted and null values are represented. Also, Bulk API 2.0 doesn't transfer Salesforce compound fields.
   *
   * By choosing this option, you optimize flow performance for both small and large data transfers, but the tradeoff is inconsistent formatting in the output.
   */
  AUTOMATIC = 'AUTOMATIC',

  /**
   * Amazon AppFlow uses only Salesforce Bulk API 2.0, regardless of the number of records the flow transfers. Output formatting is consistent across runs, but Bulk API 2.0 doesn't transfer Salesforce compound fields.
   */
  BULKV2 = 'BULKV2',

  /**
   * Amazon AppFlow uses only Salesforce REST API, regardless of the number of records the flow transfers. Output formatting is consistent across runs.
   */
  REST_SYNC = 'REST_SYNC'
}
10 changes: 9 additions & 1 deletion src/salesforce/source.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,20 @@ SPDX-License-Identifier: Apache-2.0
import { CfnFlow } from 'aws-cdk-lib/aws-appflow';
import { IConstruct } from 'constructs';
import { SalesforceConnectorProfile } from './profile';
import { SalesforceDataTransferApi } from './salesforce-data-transfer-api';
import { SalesforceConnectorType } from './type';
import { ConnectorType } from '../core/connectors/connector-type';
import { IFlow } from '../core/flows';
import { ISource } from '../core/vertices/source';

export interface SalesforceSourceProps {
readonly profile: SalesforceConnectorProfile;

/**
* Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data from Salesforce.
*/
readonly dataTransferApi?: SalesforceDataTransferApi;

readonly object: string;
readonly apiVersion?: string;
readonly enableDynamicFieldUpdate?: boolean;
Expand Down Expand Up @@ -39,6 +46,7 @@ export class SalesforceSource implements ISource {
private buildSourceConnectorProperties(): CfnFlow.SourceConnectorPropertiesProperty {
return {
salesforce: {
dataTransferApi: this.props.dataTransferApi,
enableDynamicFieldUpdate: this.props.enableDynamicFieldUpdate,
includeDeletedRecords: this.props.includeDeletedRecords,
object: this.props.object,
Expand All @@ -51,4 +59,4 @@ export class SalesforceSource implements ISource {
scope.node.addDependency(resource);
}
}
}
}
Loading

0 comments on commit 0000bc3

Please sign in to comment.