From 88790a2a31874b78702132c7272f2168ba622b4e Mon Sep 17 00:00:00 2001
From: Adil Ahmed
Date: Mon, 20 Nov 2023 23:33:17 +0500
Subject: [PATCH 01/21] Set adjusted_gain_loss in PnL report using
 pnl_adjusted_gain_loss flag

---
 .DS_Store                                          | Bin 6148 -> 8196 bytes
 tap_quickbooks/__init__.py                         |   4 +++-
 tap_quickbooks/quickbooks/__init__.py              |   7 +++++--
 .../ProfitAndLossDetailReport.py                   |   5 ++++-
 4 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/.DS_Store b/.DS_Store
index 56abb8ea64483eef2828b876b2331219f7ebf15f..e32bfdc5cbf3d5772f5956fccaeeb3c9f5e05aaa 100644
GIT binary patch
literal 8196
(base85-encoded binary payload omitted: .DS_Store is a macOS Finder artifact, and the blob carries no reviewable content)
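For orientation, the three Python diffs that follow thread a single boolean through the tap: main_impl() reads pnl_adjusted_gain_loss from the config, the Quickbooks client stores it, and query_report() hands it to ProfitAndLossDetailReport, which turns it into a request parameter. A minimal standalone sketch of that plumbing pattern (names mirror the patch; the trimmed class bodies are illustrative, not the tap's real code):

    class Quickbooks:
        def __init__(self, pnl_adjusted_gain_loss=None, **kwargs):
            # Keep the flag on the client so query_report() can pass it along.
            self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss

    class ProfitAndLossDetailReport:
        def __init__(self, qb, start_date, state_passed, pnl_adjusted_gain_loss=None):
            self.qb = qb
            self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss

    CONFIG = {"pnl_adjusted_gain_loss": True}  # parsed tap config
    qb = Quickbooks(pnl_adjusted_gain_loss=CONFIG.get("pnl_adjusted_gain_loss", False))
    reader = ProfitAndLossDetailReport(
        qb, None, False, pnl_adjusted_gain_loss=qb.pnl_adjusted_gain_loss
    )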
diff --git a/tap_quickbooks/__init__.py b/tap_quickbooks/__init__.py index 6dcb839..4c0d463 100644 --- a/tap_quickbooks/__init__.py +++ b/tap_quickbooks/__init__.py @@ -272,7 +272,9 @@ def main_impl(): gl_full_sync = CONFIG.get('gl_full_sync'), gl_weekly = CONFIG.get('gl_weekly', False), gl_daily = CONFIG.get('gl_daily', False), - gl_basic_fields = CONFIG.get('gl_basic_fields', False)) + gl_basic_fields = CONFIG.get('gl_basic_fields', False), + pnl_adjusted_gain_loss = CONFIG.get('pnl_adjusted_gain_loss', False) + ) qb.login() if args.discover: diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 6429aca..096cf96 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -243,7 +243,9 @@ def __init__(self, gl_weekly = None, gl_daily = None, gl_basic_fields = None, - realm_id=None): + realm_id=None, + pnl_adjusted_gain_loss = None + ): self.api_type = api_type.upper() if api_type else None self.report_period_days = report_period_days self.gl_full_sync = gl_full_sync @@ -258,6 +260,7 @@ def __init__(self, self.qb_client_secret = qb_client_secret self.session = requests.Session() self.access_token = None + self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss self.base_url = "https://sandbox-quickbooks.api.intuit.com/v3/company/" if is_sandbox is True else 'https://quickbooks.api.intuit.com/v3/company/' @@ -485,5 +488,5 @@ def query_report(self, catalog_entry, state, state_passed): elif catalog_entry["stream"] == "TransactionListReport": reader = TransactionListReport(self, start_date, state_passed) else: - reader = ProfitAndLossDetailReport(self, start_date, state_passed) + reader = ProfitAndLossDetailReport(self, start_date, state_passed,pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss) return reader.sync(catalog_entry) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index c31dfa7..d8c50b8 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -17,10 +17,11 @@ class ProfitAndLossDetailReport(QuickbooksStream): replication_method: ClassVar[str] = 'FULL_TABLE' current_account = {} - def __init__(self, qb, start_date, state_passed): + def __init__(self, qb, start_date, state_passed,pnl_adjusted_gain_loss=None): self.qb = qb self.start_date = start_date self.state_passed = state_passed + self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss def _get_column_metadata(self, resp): columns = [] @@ -117,6 +118,8 @@ def sync(self, catalog_entry): "accounting_method": "Accrual", "columns": ",".join(cols) } + if self.pnl_adjusted_gain_loss: + params.update({"adjusted_gain_loss":self.pnl_adjusted_gain_loss}) LOGGER.info(f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}") resp = self._get(report_entity='ProfitAndLossDetail', params=params) From 7c14db2711ccfdcdb2905952caedfa25862a0719 Mon Sep 17 00:00:00 2001 From: Adil Ahmed Date: Thu, 23 Nov 2023 00:32:30 +0500 Subject: [PATCH 02/21] Fix for parsing unrealized row for PnLDetail report --- .../ProfitAndLossDetailReport.py | 100 +++++++++++++----- 1 file changed, 71 insertions(+), 29 deletions(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index d8c50b8..df80975 100644 --- 
a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -10,14 +10,15 @@ LOGGER = singer.get_logger() NUMBER_OF_PERIODS = 3 + class ProfitAndLossDetailReport(QuickbooksStream): - tap_stream_id: ClassVar[str] = 'ProfitAndLossDetailReport' - stream: ClassVar[str] = 'ProfitAndLossDetailReport' + tap_stream_id: ClassVar[str] = "ProfitAndLossDetailReport" + stream: ClassVar[str] = "ProfitAndLossDetailReport" key_properties: ClassVar[List[str]] = [] - replication_method: ClassVar[str] = 'FULL_TABLE' + replication_method: ClassVar[str] = "FULL_TABLE" current_account = {} - def __init__(self, qb, start_date, state_passed,pnl_adjusted_gain_loss=None): + def __init__(self, qb, start_date, state_passed, pnl_adjusted_gain_loss=None): self.qb = qb self.start_date = start_date self.state_passed = state_passed @@ -35,14 +36,16 @@ def _get_column_metadata(self, resp): def _recursive_row_search(self, row, output, categories): row_group = row.get("Rows") - if row.get("type")=="Section": + if row.get("type") == "Section": if row.get("Header", {}).get("ColData", [{}]): if row.get("Header", {}).get("ColData", [{}])[0].get("id"): self.current_account = row.get("Header", {}).get("ColData", [{}])[0] - if 'ColData' in list(row.keys()): + if "ColData" in list(row.keys()): # Write the row data = row.get("ColData") values = [column for column in data] + if len([v for v in values if "Unrealized" in v.get("value")]) > 0: + values categories_copy = categories.copy() values.append(categories_copy) values_copy = values.copy() @@ -106,23 +109,30 @@ def sync(self, catalog_entry): start_date = self.start_date.date() delta = 364 - while start_datedatetime.date.today(): + end_date = start_date + datetime.timedelta(delta) + if end_date > datetime.date.today(): end_date = datetime.date.today() params = { "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), "accounting_method": "Accrual", - "columns": ",".join(cols) + "columns": ",".join(cols), } if self.pnl_adjusted_gain_loss: - params.update({"adjusted_gain_loss":self.pnl_adjusted_gain_loss}) - - LOGGER.info(f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='ProfitAndLossDetail', params=params) + params.update({"adjusted_gain_loss": "true"}) + #Don't send columns with this param + del params["columns"] + + LOGGER.info( + f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}" + ) + LOGGER.info( + f"Fetch Report with params {params}" + ) + resp = self._get(report_entity="ProfitAndLossDetail", params=params) start_date = end_date + datetime.timedelta(1) # Get column metadata. 
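# --- Illustrative aside, not part of the diff: the params handling above.
# When the flag is set, the request gains adjusted_gain_loss=true and, per the
# patch's inline comment, must not also carry "columns", so that key is
# deleted. A condensed, runnable sketch of the same construction (the column
# ids and the start date are illustrative placeholders):
import datetime

pnl_adjusted_gain_loss = True  # stands in for self.pnl_adjusted_gain_loss
start = datetime.date(2023, 1, 1)
end = min(start + datetime.timedelta(364), datetime.date.today())
params = {
    "start_date": start.strftime("%Y-%m-%d"),
    "end_date": end.strftime("%Y-%m-%d"),
    "accounting_method": "Accrual",
    "columns": "tx_date,txn_type,subt_nat_amount",  # illustrative column ids
}
if pnl_adjusted_gain_loss:
    params["adjusted_gain_loss"] = "true"
    del params["columns"]  # the two parameters do not combine
# --- end aside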
@@ -156,17 +166,37 @@ def sync(self, catalog_entry): cleansed_row[f"{k}Id"] = v.get("id") else: cleansed_row[k] = v - - cleansed_row["Amount"] = float(cleansed_row.get("Amount")) if cleansed_row.get("Amount") else None - cleansed_row["Balance"] = float(cleansed_row.get("Balance")) if cleansed_row.get("Amount") else None - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") - if cleansed_row.get('Date'): + try: + cleansed_row["Amount"] = ( + float(cleansed_row.get("Amount")) + if cleansed_row.get("Amount") + else None + ) + except: + cleansed_row["Amount"] = None + try: + cleansed_row["Balance"] = ( + float(cleansed_row.get("Balance")) + if cleansed_row.get("Amount") + else None + ) + except: + cleansed_row["Balance"] = None + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) + if cleansed_row.get("Date"): try: - cleansed_row["Date"] = parse(cleansed_row['Date']) + cleansed_row["Date"] = parse(cleansed_row["Date"]) except: - continue + if "Unrealized" in cleansed_row["Date"]: + cleansed_row["TransactionType"] = cleansed_row["Date"] + cleansed_row["Date"] = None + else: + continue yield cleansed_row + else: LOGGER.info(f"Syncing P&L of last {NUMBER_OF_PERIODS} periods") end_date = datetime.date.today() @@ -177,11 +207,13 @@ def sync(self, catalog_entry): "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), "accounting_method": "Accrual", - "columns": ",".join(cols) + "columns": ",".join(cols), } - LOGGER.info(f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='ProfitAndLossDetail', params=params) + LOGGER.info( + f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}" + ) + resp = self._get(report_entity="ProfitAndLossDetail", params=params) # Get column metadata. 
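# --- Illustrative aside, not part of the diff: why the try/except blocks
# above exist. The P&L detail report can emit a summary row whose Date cell
# holds a label containing "Unrealized" rather than a date, which makes
# dateutil's parse() raise; the patch reroutes that label into TransactionType
# instead of dropping the row. Equivalent sketch (the diff itself uses bare
# `except:` clauses; the narrower exception tuple here is an editorial choice):
#
#     from dateutil.parser import parse
#     try:
#         cleansed_row["Date"] = parse(cleansed_row["Date"])
#     except (ValueError, OverflowError):
#         if "Unrealized" in cleansed_row["Date"]:
#             cleansed_row["TransactionType"] = cleansed_row["Date"]
#             cleansed_row["Date"] = None
#         else:
#             continue  # anything else unparseable is still skipped
# --- end aside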
columns = self._get_column_metadata(resp) @@ -216,11 +248,21 @@ def sync(self, catalog_entry): else: cleansed_row[k] = v - cleansed_row["Amount"] = float(cleansed_row.get("Amount")) if cleansed_row.get("Amount") else None - cleansed_row["Balance"] = float(cleansed_row.get("Balance")) if cleansed_row.get("Amount") else None - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") - if cleansed_row.get('Date'): - cleansed_row["Date"] = parse(cleansed_row['Date']) + cleansed_row["Amount"] = ( + float(cleansed_row.get("Amount")) + if cleansed_row.get("Amount") + else None + ) + cleansed_row["Balance"] = ( + float(cleansed_row.get("Balance")) + if cleansed_row.get("Amount") + else None + ) + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) + if cleansed_row.get("Date"): + cleansed_row["Date"] = parse(cleansed_row["Date"]) yield cleansed_row From 4d680e87c1823edf9892494ac0cb7b6c3d323ec7 Mon Sep 17 00:00:00 2001 From: Adil Ahmed Date: Thu, 23 Nov 2023 00:48:07 +0500 Subject: [PATCH 03/21] Removed debug code for PnL detail report --- .../quickbooks/reportstreams/ProfitAndLossDetailReport.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index df80975..37bbc77 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -44,8 +44,6 @@ def _recursive_row_search(self, row, output, categories): # Write the row data = row.get("ColData") values = [column for column in data] - if len([v for v in values if "Unrealized" in v.get("value")]) > 0: - values categories_copy = categories.copy() values.append(categories_copy) values_copy = values.copy() From ff0e69dd8bb5e73bd443d7725630c9ef1736ff10 Mon Sep 17 00:00:00 2001 From: Adil Ahmed Date: Thu, 23 Nov 2023 02:41:28 +0500 Subject: [PATCH 04/21] Process header and summary as rows for BalanceSheetReport --- .../MonthlyBalanceSheetReport.py | 69 ++++++++++++------- 1 file changed, 45 insertions(+), 24 deletions(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py b/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py index 70a94aa..3b756ae 100644 --- a/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py @@ -9,16 +9,18 @@ LOGGER = singer.get_logger() NUMBER_OF_PERIODS = 3 + class MonthlyBalanceSheetReport(QuickbooksStream): - tap_stream_id: ClassVar[str] = 'MonthlyBalanceSheetReport' - stream: ClassVar[str] = 'MonthlyBalanceSheetReport' + tap_stream_id: ClassVar[str] = "MonthlyBalanceSheetReport" + stream: ClassVar[str] = "MonthlyBalanceSheetReport" key_properties: ClassVar[List[str]] = [] - replication_method: ClassVar[str] = 'FULL_TABLE' + replication_method: ClassVar[str] = "FULL_TABLE" - def __init__(self, qb, start_date, state_passed): + def __init__(self, qb, start_date, state_passed, pnl_adjusted_gain_loss=None): self.qb = qb self.start_date = start_date self.state_passed = state_passed + self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss def _get_column_metadata(self, resp): columns = [] @@ -34,7 +36,7 @@ def _get_column_metadata(self, resp): def _recursive_row_search(self, row, output, categories): row_group = row.get("Rows") - if 'ColData' in list(row.keys()): + if "ColData" 
in list(row.keys()): # Write the row data = row.get("ColData") values = [column.get("value") for column in data] @@ -47,10 +49,17 @@ def _recursive_row_search(self, row, output, categories): else: row_array = row_group.get("Row") header = row.get("Header") + summary = row.get("Summary") if header is not None: categories.append(header.get("ColData")[0].get("value")) + if summary is not None: + categories.append(summary.get("ColData")[0].get("value")) for row in row_array: self._recursive_row_search(row, output, categories) + if header is not None: + self._recursive_row_search(header, output, categories) + if summary is not None: + self._recursive_row_search(summary, output, categories) if header is not None: categories.pop() @@ -65,11 +74,16 @@ def sync(self, catalog_entry): "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), "accounting_method": "Accrual", - "summarize_column_by": "Month" + "summarize_column_by": "Month", } - LOGGER.info(f"Fetch MonthlyBalanceSheet Report for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='BalanceSheet', params=params) + if self.pnl_adjusted_gain_loss: + params.update({"adjusted_gain_loss": "true"}) + + LOGGER.info( + f"Fetch MonthlyBalanceSheet Report for period {params['start_date']} to {params['end_date']}" + ) + resp = self._get(report_entity="BalanceSheet", params=params) # Get column metadata. columns = self._get_column_metadata(resp) @@ -100,17 +114,20 @@ def sync(self, catalog_entry): else: cleansed_row.update({k: v}) - - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) monthly_total = [] - for key,value in cleansed_row.items(): - if key not in ['Account', 'Categories', 'SyncTimestampUtc']: - monthly_total.append({key:value}) - cleansed_row['MonthlyTotal'] = monthly_total + for key, value in cleansed_row.items(): + if key not in ["Account", "Categories", "SyncTimestampUtc"]: + monthly_total.append({key: value}) + cleansed_row["MonthlyTotal"] = monthly_total yield cleansed_row else: - LOGGER.info(f"Syncing MonthlyBalanceSheet of last {NUMBER_OF_PERIODS} periods") + LOGGER.info( + f"Syncing MonthlyBalanceSheet of last {NUMBER_OF_PERIODS} periods" + ) end_date = datetime.date.today() for i in range(NUMBER_OF_PERIODS): @@ -119,11 +136,13 @@ def sync(self, catalog_entry): "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), "accounting_method": "Accrual", - "summarize_column_by": "Month" + "summarize_column_by": "Month", } - LOGGER.info(f"Fetch MonthlyBalanceSheet for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='BalanceSheet', params=params) + LOGGER.info( + f"Fetch MonthlyBalanceSheet for period {params['start_date']} to {params['end_date']}" + ) + resp = self._get(report_entity="BalanceSheet", params=params) # Get column metadata. 
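# --- Illustrative aside, not part of the diff: what the Header/Summary
# handling added above does. QuickBooks report JSON nests Section rows whose
# Header and Summary entries carry ColData of their own; walking only
# Rows.Row children drops those values. A hypothetical reduced form of the
# recursion, under that reading:

def walk(row, output, categories):
    if "ColData" in row:  # leaf: emit the row's values plus its category path
        output.append([c.get("value") for c in row["ColData"]] + [list(categories)])
        return
    header, summary = row.get("Header"), row.get("Summary")
    if header:
        categories.append(header["ColData"][0]["value"])
        walk(header, output, categories)   # the header is now emitted as a row too
    for child in (row.get("Rows") or {}).get("Row", []):
        walk(child, output, categories)
    if summary:
        walk(summary, output, categories)  # ...and so is the section summary
    if header:
        categories.pop()

# Example: a one-account Assets section yields header, detail, and summary rows.
out = []
walk({"Header": {"ColData": [{"value": "Assets"}]},
      "Rows": {"Row": [{"ColData": [{"value": "Checking"}, {"value": "100.00"}]}]},
      "Summary": {"ColData": [{"value": "Total Assets"}, {"value": "100.00"}]}},
     out, [])
# --- end aside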
columns = self._get_column_metadata(resp) @@ -151,13 +170,15 @@ def sync(self, catalog_entry): continue else: cleansed_row.update({k: v}) - - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) monthly_total = [] - for key,value in cleansed_row.items(): - if key not in ['Account', 'Categories', 'SyncTimestampUtc']: - monthly_total.append({key:value}) - cleansed_row['MonthlyTotal'] = monthly_total + for key, value in cleansed_row.items(): + if key not in ["Account", "Categories", "SyncTimestampUtc"]: + monthly_total.append({key: value}) + cleansed_row["MonthlyTotal"] = monthly_total yield cleansed_row From 5192bdd4a42d86cb444b6226bc6e507501a78215 Mon Sep 17 00:00:00 2001 From: Hassan Syyid Date: Wed, 22 Nov 2023 20:57:49 -0500 Subject: [PATCH 05/21] Fix issue with PnL --- .../reportstreams/ProfitAndLossDetailReport.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index 37bbc77..08f7b99 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -259,8 +259,16 @@ def sync(self, catalog_entry): cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" ) + if cleansed_row.get("Date"): - cleansed_row["Date"] = parse(cleansed_row["Date"]) + try: + cleansed_row["Date"] = parse(cleansed_row["Date"]) + except: + if "Unrealized" in cleansed_row["Date"]: + cleansed_row["TransactionType"] = cleansed_row["Date"] + cleansed_row["Date"] = None + else: + continue yield cleansed_row From 7ab62fde28e59430cab36a3e22ab5ad0421e4059 Mon Sep 17 00:00:00 2001 From: Hassan Syyid Date: Wed, 22 Nov 2023 21:09:02 -0500 Subject: [PATCH 06/21] fix datetime type --- tap_quickbooks/quickbooks/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 096cf96..e4602c7 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -94,12 +94,10 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches } datetime_type = { - "anyOf": [ - { - "type": "string", - "format": "date-time" - }, - string_type + "format": "date-time", + "type": [ + "null", + "string" ] } From f59940cf31582d23f9135da1a004709fa8ee9368 Mon Sep 17 00:00:00 2001 From: Tarkeshwar Thakur <77226080+tark-dt@users.noreply.github.com> Date: Wed, 22 Nov 2023 18:17:23 -0800 Subject: [PATCH 07/21] Update ProfitAndLossDetailReport.py (#81) --- .../quickbooks/reportstreams/ProfitAndLossDetailReport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index 08f7b99..1d1cacb 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -189,7 +189,7 @@ def sync(self, catalog_entry): except: if "Unrealized" in cleansed_row["Date"]: cleansed_row["TransactionType"] = cleansed_row["Date"] - cleansed_row["Date"] = None + cleansed_row["Date"] = end_date else: continue From 
57710080bd06917cd8986bc2c671e58d17acb3d0 Mon Sep 17 00:00:00 2001 From: Hassan Syyid Date: Wed, 22 Nov 2023 21:19:59 -0500 Subject: [PATCH 08/21] fix --- tap_quickbooks/quickbooks/__init__.py | 10 ++++++---- .../reportstreams/ProfitAndLossDetailReport.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index e4602c7..096cf96 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -94,10 +94,12 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches } datetime_type = { - "format": "date-time", - "type": [ - "null", - "string" + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + string_type ] } diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index 1d1cacb..9df19a8 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -266,7 +266,7 @@ def sync(self, catalog_entry): except: if "Unrealized" in cleansed_row["Date"]: cleansed_row["TransactionType"] = cleansed_row["Date"] - cleansed_row["Date"] = None + cleansed_row["Date"] = end_date else: continue From fd37f916ad8244363a9dd1be8e2bf9593f1e56ca Mon Sep 17 00:00:00 2001 From: Hassan Syyid Date: Wed, 22 Nov 2023 21:24:08 -0500 Subject: [PATCH 09/21] disable --- .../quickbooks/reportstreams/ProfitAndLossDetailReport.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index 9df19a8..d12cad5 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -119,10 +119,10 @@ def sync(self, catalog_entry): "accounting_method": "Accrual", "columns": ",".join(cols), } - if self.pnl_adjusted_gain_loss: - params.update({"adjusted_gain_loss": "true"}) - #Don't send columns with this param - del params["columns"] + # if self.pnl_adjusted_gain_loss: + # params.update({"adjusted_gain_loss": "true"}) + # #Don't send columns with this param + # del params["columns"] LOGGER.info( f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}" From 1cc571f544addecc872baff22881e1343822d986 Mon Sep 17 00:00:00 2001 From: Hassan Syyid Date: Wed, 22 Nov 2023 21:44:02 -0500 Subject: [PATCH 10/21] fix --- .../reportstreams/ProfitAndLossDetailReport.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index d12cad5..23b1a73 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -119,10 +119,10 @@ def sync(self, catalog_entry): "accounting_method": "Accrual", "columns": ",".join(cols), } - # if self.pnl_adjusted_gain_loss: - # params.update({"adjusted_gain_loss": "true"}) - # #Don't send columns with this param - # del params["columns"] + if self.pnl_adjusted_gain_loss: + params.update({"adjusted_gain_loss": "true"}) + #Don't send columns with this param + del params["columns"] LOGGER.info( f"Fetch Journal Report for period {params['start_date']} 
to {params['end_date']}" @@ -207,6 +207,10 @@ def sync(self, catalog_entry): "accounting_method": "Accrual", "columns": ",".join(cols), } + if self.pnl_adjusted_gain_loss: + params.update({"adjusted_gain_loss": "true"}) + #Don't send columns with this param + del params["columns"] LOGGER.info( f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}" @@ -215,6 +219,7 @@ def sync(self, catalog_entry): # Get column metadata. columns = self._get_column_metadata(resp) + columns += ["Account"] # Recursively get row data. row_group = resp.get("Rows") From f9435feb4ac48a2bbf156eb88acc717ced91b7ac Mon Sep 17 00:00:00 2001 From: Adil Ahmed Date: Sat, 25 Nov 2023 01:04:08 +0500 Subject: [PATCH 11/21] Missing and duplicate values fix for BalanceSheet and MonthlyBalanceSheetReport streams --- tap_quickbooks/quickbooks/__init__.py | 384 ++++++++++-------- .../reportstreams/BalanceSheetReport.py | 56 ++- .../MonthlyBalanceSheetReport.py | 25 +- 3 files changed, 270 insertions(+), 195 deletions(-) diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 096cf96..5c9075e 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -8,23 +8,40 @@ from requests.exceptions import RequestException import singer import singer.utils as singer_utils -import os; +import os from typing import Dict from singer import metadata, metrics -from tap_quickbooks.quickbooks.reportstreams.MonthlyBalanceSheetReport import MonthlyBalanceSheetReport -from tap_quickbooks.quickbooks.reportstreams.ProfitAndLossDetailReport import ProfitAndLossDetailReport -from tap_quickbooks.quickbooks.reportstreams.BalanceSheetReport import BalanceSheetReport -from tap_quickbooks.quickbooks.reportstreams.GeneralLedgerAccrualReport import GeneralLedgerAccrualReport -from tap_quickbooks.quickbooks.reportstreams.GeneralLedgerCashReport import GeneralLedgerCashReport +from tap_quickbooks.quickbooks.reportstreams.MonthlyBalanceSheetReport import ( + MonthlyBalanceSheetReport, +) +from tap_quickbooks.quickbooks.reportstreams.ProfitAndLossDetailReport import ( + ProfitAndLossDetailReport, +) +from tap_quickbooks.quickbooks.reportstreams.BalanceSheetReport import ( + BalanceSheetReport, +) +from tap_quickbooks.quickbooks.reportstreams.GeneralLedgerAccrualReport import ( + GeneralLedgerAccrualReport, +) +from tap_quickbooks.quickbooks.reportstreams.GeneralLedgerCashReport import ( + GeneralLedgerCashReport, +) from tap_quickbooks.quickbooks.reportstreams.CashFlowReport import CashFlowReport -from tap_quickbooks.quickbooks.reportstreams.DailyCashFlowReport import DailyCashFlowReport -from tap_quickbooks.quickbooks.reportstreams.MonthlyCashFlowReport import MonthlyCashFlowReport -from tap_quickbooks.quickbooks.reportstreams.TransactionListReport import TransactionListReport +from tap_quickbooks.quickbooks.reportstreams.DailyCashFlowReport import ( + DailyCashFlowReport, +) +from tap_quickbooks.quickbooks.reportstreams.MonthlyCashFlowReport import ( + MonthlyCashFlowReport, +) +from tap_quickbooks.quickbooks.reportstreams.TransactionListReport import ( + TransactionListReport, +) from tap_quickbooks.quickbooks.rest import Rest from tap_quickbooks.quickbooks.exceptions import ( TapQuickbooksException, - TapQuickbooksQuotaExceededException) + TapQuickbooksQuotaExceededException, +) LOGGER = singer.get_logger() @@ -35,7 +52,9 @@ def log_backoff_attempt(details): - LOGGER.info("ConnectionError detected, triggering backoff: %d try", details.get("tries")) + 
LOGGER.info( + "ConnectionError detected, triggering backoff: %d try", details.get("tries") + ) def _get_abs_path(path: str) -> str: @@ -43,16 +62,16 @@ def _get_abs_path(path: str) -> str: def _load_object_definitions() -> Dict: - '''Loads a JSON schema file for a given + """Loads a JSON schema file for a given Quickbooks Report resource into a dict representation. - ''' + """ schema_path = _get_abs_path("schemas") return singer.utils.load_json(f"{schema_path}/object_definition.json") def read_json_file(filename): # read file - with open(f"{filename}", 'r') as filetoread: + with open(f"{filename}", "r") as filetoread: data = filetoread.read() # parse file @@ -62,7 +81,7 @@ def read_json_file(filename): def write_json_file(filename, content): - with open(filename, 'w') as f: + with open(filename, "w") as f: json.dump(content, f, indent=4) @@ -71,55 +90,24 @@ def write_json_file(filename, content): def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches + number_type = {"type": ["null", "number"]} - number_type = { - "type": [ - "null", - "number" - ] - } + string_type = {"type": ["string", "null"]} - string_type = { - "type": [ - "string", - "null" - ] - } + boolean_type = {"type": ["boolean", "null"]} - boolean_type = { - "type": [ - "boolean", - "null" - ] - } + datetime_type = {"anyOf": [{"type": "string", "format": "date-time"}, string_type]} - datetime_type = { - "anyOf": [ - { - "type": "string", - "format": "date-time" - }, - string_type - ] - } - - object_type = { - "type": [ - "null", - "object" - ] - } + object_type = {"type": ["null", "object"]} - array_type = { - "type": ["null", "array"] - } + array_type = {"type": ["null", "array"]} ref_type = { "type": object_type["type"], "properties": { "value": string_type, "name": string_type, - } + }, } qb_types = { @@ -133,7 +121,7 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches "email": string_type, "address": string_type, "metadata": string_type, - "ref_type": ref_type + "ref_type": ref_type, } qb_types["custom_field"] = { @@ -142,8 +130,8 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches "DefinitionId": string_type, "Name": string_type, "Type": string_type, - "StringValue": string_type - } + "StringValue": string_type, + }, } qb_types["invoice_line"] = { @@ -164,30 +152,28 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches "Qty": number_type, "UnitPrice": number_type, "ServiceDate": qb_types["datetime"], - "Description" : string_type - } + "Description": string_type, + }, }, "SubTotalLineDetail": { "type": object_type["type"], - "properties": { - "ItemRef": qb_types["ref_type"] - } + "properties": {"ItemRef": qb_types["ref_type"]}, }, "DiscountLineDetail": { "type": object_type["type"], "properties": { "DiscountAccountRef": qb_types["object_reference"], - "DiscountPercent": number_type - } + "DiscountPercent": number_type, + }, }, "DescriptionLineDetail": { "type": object_type["type"], "properties": { "TaxCodeRef": qb_types["object_reference"], - "ServiceDate": qb_types["datetime"] - } - } - } + "ServiceDate": qb_types["datetime"], + }, + }, + }, } qb_types["journal_entry_line"] = { @@ -205,47 +191,48 @@ def field_to_property_schema(field, mdata): # pylint:disable=too-many-branches "type": object_type["type"], "properties": { "Type": string_type, - "EntityRef": qb_types["object_reference"] - } + "EntityRef": qb_types["object_reference"], + }, }, "AccountRef": qb_types["object_reference"], "ClassRef": 
qb_types["object_reference"], - "DepartmentRef": qb_types["object_reference"] - } - } - } + "DepartmentRef": qb_types["object_reference"], + }, + }, + }, } - qb_type = field['type'] + qb_type = field["type"] property_schema = qb_types[qb_type] - if qb_type == 'array': - property_schema["items"] = qb_types[field['child_type']] + if qb_type == "array": + property_schema["items"] = qb_types[field["child_type"]] return property_schema, mdata -class Quickbooks(): +class Quickbooks: # pylint: disable=too-many-instance-attributes,too-many-arguments - def __init__(self, - refresh_token=None, - token=None, - qb_client_id=None, - qb_client_secret=None, - quota_percent_per_run=None, - quota_percent_total=None, - is_sandbox=None, - include_deleted = None, - select_fields_by_default=None, - default_start_date=None, - api_type=None, - report_period_days = None, - gl_full_sync = None, - gl_weekly = None, - gl_daily = None, - gl_basic_fields = None, - realm_id=None, - pnl_adjusted_gain_loss = None - ): + def __init__( + self, + refresh_token=None, + token=None, + qb_client_id=None, + qb_client_secret=None, + quota_percent_per_run=None, + quota_percent_total=None, + is_sandbox=None, + include_deleted=None, + select_fields_by_default=None, + default_start_date=None, + api_type=None, + report_period_days=None, + gl_full_sync=None, + gl_weekly=None, + gl_daily=None, + gl_basic_fields=None, + realm_id=None, + pnl_adjusted_gain_loss=None, + ): self.api_type = api_type.upper() if api_type else None self.report_period_days = report_period_days self.gl_full_sync = gl_full_sync @@ -262,23 +249,36 @@ def __init__(self, self.access_token = None self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss - self.base_url = "https://sandbox-quickbooks.api.intuit.com/v3/company/" if is_sandbox is True else 'https://quickbooks.api.intuit.com/v3/company/' + self.base_url = ( + "https://sandbox-quickbooks.api.intuit.com/v3/company/" + if is_sandbox is True + else "https://quickbooks.api.intuit.com/v3/company/" + ) self.instance_url = f"{self.base_url}{realm_id}" LOGGER.info(f"Instance URL :- {self.instance_url}") - if isinstance(quota_percent_per_run, str) and quota_percent_per_run.strip() == '': + if ( + isinstance(quota_percent_per_run, str) + and quota_percent_per_run.strip() == "" + ): quota_percent_per_run = None - if isinstance(quota_percent_total, str) and quota_percent_total.strip() == '': + if isinstance(quota_percent_total, str) and quota_percent_total.strip() == "": quota_percent_total = None - self.quota_percent_per_run = float( - quota_percent_per_run) if quota_percent_per_run is not None else 25 - self.quota_percent_total = float( - quota_percent_total) if quota_percent_total is not None else 80 - self.is_sandbox = is_sandbox is True or (isinstance(is_sandbox, str) and is_sandbox.lower() == 'true') + self.quota_percent_per_run = ( + float(quota_percent_per_run) if quota_percent_per_run is not None else 25 + ) + self.quota_percent_total = ( + float(quota_percent_total) if quota_percent_total is not None else 80 + ) + self.is_sandbox = is_sandbox is True or ( + isinstance(is_sandbox, str) and is_sandbox.lower() == "true" + ) self.select_fields_by_default = select_fields_by_default is True or ( - isinstance(select_fields_by_default, str) and select_fields_by_default.lower() == 'true') + isinstance(select_fields_by_default, str) + and select_fields_by_default.lower() == "true" + ) self.default_start_date = default_start_date self.rest_requests_attempted = 0 self.jobs_completed = 0 @@ -294,7 +294,7 @@ def 
_get_standard_headers(self): # pylint: disable=anomalous-backslash-in-string,line-too-long def check_rest_quota_usage(self, headers): - match = re.search('^api-usage=(\d+)/(\d+)$', headers.get('Sforce-Limit-Info')) + match = re.search("^api-usage=(\d+)/(\d+)$", headers.get("Sforce-Limit-Info")) if match is None: return @@ -307,31 +307,42 @@ def check_rest_quota_usage(self, headers): max_requests_for_run = int((self.quota_percent_per_run * allotted) / 100) if percent_used_from_total > self.quota_percent_total: - total_message = ("Quickbooks has reported {}/{} ({:3.2f}%) total REST quota " + - "used across all Quickbooks Applications. Terminating " + - "replication to not continue past configured percentage " + - "of {}% total quota.").format(remaining, - allotted, - percent_used_from_total, - self.quota_percent_total) + total_message = ( + "Quickbooks has reported {}/{} ({:3.2f}%) total REST quota " + + "used across all Quickbooks Applications. Terminating " + + "replication to not continue past configured percentage " + + "of {}% total quota." + ).format( + remaining, allotted, percent_used_from_total, self.quota_percent_total + ) raise TapQuickbooksQuotaExceededException(total_message) elif self.rest_requests_attempted > max_requests_for_run: - partial_message = ("This replication job has made {} REST requests ({:3.2f}% of " + - "total quota). Terminating replication due to allotted " + - "quota of {}% per replication.").format(self.rest_requests_attempted, - (self.rest_requests_attempted / allotted) * 100, - self.quota_percent_per_run) + partial_message = ( + "This replication job has made {} REST requests ({:3.2f}% of " + + "total quota). Terminating replication due to allotted " + + "quota of {}% per replication." + ).format( + self.rest_requests_attempted, + (self.rest_requests_attempted / allotted) * 100, + self.quota_percent_per_run, + ) raise TapQuickbooksQuotaExceededException(partial_message) # pylint: disable=too-many-arguments - @backoff.on_exception(backoff.expo, - requests.exceptions.ConnectionError, - max_tries=10, - factor=2, - on_backoff=log_backoff_attempt) - def _make_request(self, http_method, url, headers=None, body=None, stream=False, params=None): + @backoff.on_exception( + backoff.expo, + requests.exceptions.ConnectionError, + max_tries=10, + factor=2, + on_backoff=log_backoff_attempt, + ) + def _make_request( + self, http_method, url, headers=None, body=None, stream=False, params=None + ): if http_method == "GET": - LOGGER.info("Making %s request to %s with params: %s", http_method, url, params) + LOGGER.info( + "Making %s request to %s with params: %s", http_method, url, params + ) resp = self.session.get(url, headers=headers, stream=stream, params=params) elif http_method == "POST": LOGGER.info("Making %s request to %s with body %s", http_method, url, body) @@ -344,7 +355,7 @@ def _make_request(self, http_method, url, headers=None, body=None, stream=False, except RequestException as ex: raise ex - if resp.headers.get('Sforce-Limit-Info') is not None: + if resp.headers.get("Sforce-Limit-Info") is not None: self.rest_requests_attempted += 1 self.check_rest_quota_usage(resp.headers) @@ -352,35 +363,43 @@ def _make_request(self, http_method, url, headers=None, body=None, stream=False, def login(self): if self.is_sandbox: - login_url = 'https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer' + login_url = "https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer" else: - login_url = 'https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer' + login_url = 
"https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer" - login_body = {'grant_type': 'refresh_token', 'client_id': self.qb_client_id, - 'client_secret': self.qb_client_secret, 'refresh_token': self.refresh_token} + login_body = { + "grant_type": "refresh_token", + "client_id": self.qb_client_id, + "client_secret": self.qb_client_secret, + "refresh_token": self.refresh_token, + } LOGGER.info("Attempting login via OAuth2") resp = None try: - resp = self._make_request("POST", login_url, body=login_body, - headers={"Content-Type": "application/x-www-form-urlencoded"}) + resp = self._make_request( + "POST", + login_url, + body=login_body, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ) LOGGER.info("OAuth2 login successful") auth = resp.json() - self.access_token = auth['access_token'] + self.access_token = auth["access_token"] + + new_refresh_token = auth["refresh_token"] - new_refresh_token = auth['refresh_token'] - # persist access_token parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', help='Config file', required=True) + parser.add_argument("-c", "--config", help="Config file", required=True) _args, unknown = parser.parse_known_args() config_file = _args.config config_content = read_json_file(config_file) - config_content['access_token'] = self.access_token + config_content["access_token"] = self.access_token write_json_file(config_file, config_content) # Check if the refresh token is update, if so update the config file with new refresh token. @@ -388,26 +407,32 @@ def login(self): LOGGER.info(f"Old refresh token [{self.refresh_token}] expired.") LOGGER.info("New Refresh token: {}".format(new_refresh_token)) parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', help='Config file', required=True) + parser.add_argument("-c", "--config", help="Config file", required=True) _args, unknown = parser.parse_known_args() config_file = _args.config config_content = read_json_file(config_file) - config_content['refresh_token'] = new_refresh_token + config_content["refresh_token"] = new_refresh_token write_json_file(config_file, config_content) self.refresh_token = new_refresh_token except Exception as e: error_message = str(e) - if resp is None and hasattr(e, 'response') and e.response is not None: # pylint:disable=no-member + if ( + resp is None and hasattr(e, "response") and e.response is not None + ): # pylint:disable=no-member resp = e.response # pylint:disable=no-member # NB: requests.models.Response is always falsy here. 
It is false if status code >= 400 if isinstance(resp, requests.models.Response): - error_message = error_message + ", Response from Quickbooks: {}".format(resp.text) + error_message = error_message + ", Response from Quickbooks: {}".format( + resp.text + ) raise Exception(error_message) from e finally: LOGGER.info("Starting new login timer") - self.login_timer = threading.Timer(REFRESH_TOKEN_EXPIRATION_PERIOD, self.login) + self.login_timer = threading.Timer( + REFRESH_TOKEN_EXPIRATION_PERIOD, self.login + ) self.login_timer.start() def describe(self, sobject=None): @@ -419,34 +444,40 @@ def describe(self, sobject=None): # pylint: disable=no-self-use def _get_selected_properties(self, catalog_entry): - mdata = metadata.to_map(catalog_entry['metadata']) - properties = catalog_entry['schema'].get('properties', {}) - - return [k for k in properties.keys() - if singer.should_sync_field(metadata.get(mdata, ('properties', k), 'inclusion'), - metadata.get(mdata, ('properties', k), 'selected'), - self.select_fields_by_default)] + mdata = metadata.to_map(catalog_entry["metadata"]) + properties = catalog_entry["schema"].get("properties", {}) + + return [ + k + for k in properties.keys() + if singer.should_sync_field( + metadata.get(mdata, ("properties", k), "inclusion"), + metadata.get(mdata, ("properties", k), "selected"), + self.select_fields_by_default, + ) + ] def get_start_date(self, state, catalog_entry): - catalog_metadata = metadata.to_map(catalog_entry['metadata']) - replication_key = catalog_metadata.get((), {}).get('replication-key') + catalog_metadata = metadata.to_map(catalog_entry["metadata"]) + replication_key = catalog_metadata.get((), {}).get("replication-key") - return (singer.get_bookmark(state, - catalog_entry['tap_stream_id'], - replication_key) or self.default_start_date) + return ( + singer.get_bookmark(state, catalog_entry["tap_stream_id"], replication_key) + or self.default_start_date + ) - def _build_query_string(self, catalog_entry, start_date, end_date=None, order_by_clause=True): + def _build_query_string( + self, catalog_entry, start_date, end_date=None, order_by_clause=True + ): selected_properties = self._get_selected_properties(catalog_entry) - query = "SELECT {} FROM {}".format("*", catalog_entry['stream']) + query = "SELECT {} FROM {}".format("*", catalog_entry["stream"]) - catalog_metadata = metadata.to_map(catalog_entry['metadata']) - replication_key = catalog_metadata.get((), {}).get('replication-key') + catalog_metadata = metadata.to_map(catalog_entry["metadata"]) + replication_key = catalog_metadata.get((), {}).get("replication-key") if replication_key: - where_clause = " WHERE {} > '{}' ".format( - replication_key, - start_date) + where_clause = " WHERE {} > '{}' ".format(replication_key, start_date) if end_date: end_date_clause = " AND {} <= {}".format(replication_key, end_date) else: @@ -466,15 +497,27 @@ def query(self, catalog_entry, state, state_passed): return rest.query(catalog_entry, state) else: raise TapQuickbooksException( - "api_type should be REST was: {}".format( - self.api_type)) + "api_type should be REST was: {}".format(self.api_type) + ) def query_report(self, catalog_entry, state, state_passed): - start_date = singer_utils.strptime_with_tz(self.get_start_date(state, catalog_entry)) + start_date = singer_utils.strptime_with_tz( + self.get_start_date(state, catalog_entry) + ) if catalog_entry["stream"] == "BalanceSheetReport": - reader = BalanceSheetReport(self, start_date, state_passed) + reader = BalanceSheetReport( + self, + start_date, + 
state_passed, + pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss, + ) elif catalog_entry["stream"] == "MonthlyBalanceSheetReport": - reader = MonthlyBalanceSheetReport(self, start_date, state_passed) + reader = MonthlyBalanceSheetReport( + self, + start_date, + state_passed, + pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss, + ) elif catalog_entry["stream"] == "GeneralLedgerAccrualReport": reader = GeneralLedgerAccrualReport(self, start_date, state_passed) elif catalog_entry["stream"] == "GeneralLedgerCashReport": @@ -488,5 +531,10 @@ def query_report(self, catalog_entry, state, state_passed): elif catalog_entry["stream"] == "TransactionListReport": reader = TransactionListReport(self, start_date, state_passed) else: - reader = ProfitAndLossDetailReport(self, start_date, state_passed,pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss) + reader = ProfitAndLossDetailReport( + self, + start_date, + state_passed, + pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss, + ) return reader.sync(catalog_entry) diff --git a/tap_quickbooks/quickbooks/reportstreams/BalanceSheetReport.py b/tap_quickbooks/quickbooks/reportstreams/BalanceSheetReport.py index 6772c80..886c955 100644 --- a/tap_quickbooks/quickbooks/reportstreams/BalanceSheetReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/BalanceSheetReport.py @@ -9,16 +9,18 @@ LOGGER = singer.get_logger() NUMBER_OF_PERIODS = 3 + class BalanceSheetReport(QuickbooksStream): - tap_stream_id: ClassVar[str] = 'BalanceSheetReport' - stream: ClassVar[str] = 'BalanceSheetReport' + tap_stream_id: ClassVar[str] = "BalanceSheetReport" + stream: ClassVar[str] = "BalanceSheetReport" key_properties: ClassVar[List[str]] = [] - replication_method: ClassVar[str] = 'FULL_TABLE' + replication_method: ClassVar[str] = "FULL_TABLE" - def __init__(self, qb, start_date, state_passed): + def __init__(self, qb, start_date, state_passed, pnl_adjusted_gain_loss=None): self.qb = qb self.start_date = start_date self.state_passed = state_passed + self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss def _get_column_metadata(self, resp): columns = [] @@ -33,8 +35,9 @@ def _get_column_metadata(self, resp): return columns def _recursive_row_search(self, row, output, categories): + header = None row_group = row.get("Rows") - if 'ColData' in list(row.keys()): + if "ColData" in list(row.keys()): # Write the row data = row.get("ColData") values = [column.get("value") for column in data] @@ -45,12 +48,21 @@ def _recursive_row_search(self, row, output, categories): elif row_group is None or row_group == {}: pass else: - row_array = row_group.get("Row") + # row_array = row_group.get("Row") header = row.get("Header") if header is not None: categories.append(header.get("ColData")[0].get("value")) - for row in row_array: - self._recursive_row_search(row, output, categories) + for key, row_item in row.items(): + if isinstance(row_item, str): + continue + if "ColData" in list(row_item.keys()): + self._recursive_row_search(row_item, output, categories) + elif "Row" in list(row_item.keys()): + for sub_row in row_item["Row"]: + self._recursive_row_search(sub_row, output, categories) + elif isinstance(row_item.get(key), dict): + if key in row_item: + self._recursive_row_search(row_item[key], output, categories) if header is not None: categories.pop() @@ -64,11 +76,15 @@ def sync(self, catalog_entry): params = { "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), - "accounting_method": "Accrual" + "accounting_method": "Accrual", } - - LOGGER.info(f"Fetch 
BalanceSheet Report for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='BalanceSheet', params=params) + if self.pnl_adjusted_gain_loss: + params.update({"adjusted_gain_loss": "true"}) + + LOGGER.info( + f"Fetch BalanceSheet Report for period {params['start_date']} to {params['end_date']}" + ) + resp = self._get(report_entity="BalanceSheet", params=params) # Get column metadata. columns = self._get_column_metadata(resp) @@ -100,7 +116,9 @@ def sync(self, catalog_entry): cleansed_row.update({k: v}) cleansed_row["Total"] = float(row.get("Total")) - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) yield cleansed_row else: @@ -112,11 +130,13 @@ def sync(self, catalog_entry): params = { "start_date": start_date.strftime("%Y-%m-%d"), "end_date": end_date.strftime("%Y-%m-%d"), - "accounting_method": "Accrual" + "accounting_method": "Accrual", } - LOGGER.info(f"Fetch BalanceSheet for period {params['start_date']} to {params['end_date']}") - resp = self._get(report_entity='BalanceSheet', params=params) + LOGGER.info( + f"Fetch BalanceSheet for period {params['start_date']} to {params['end_date']}" + ) + resp = self._get(report_entity="BalanceSheet", params=params) # Get column metadata. columns = self._get_column_metadata(resp) @@ -150,7 +170,9 @@ def sync(self, catalog_entry): cleansed_row.update({k: v}) cleansed_row["Total"] = float(row.get("Total")) - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( + singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" + ) yield cleansed_row diff --git a/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py b/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py index 3b756ae..2ec53ac 100644 --- a/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/MonthlyBalanceSheetReport.py @@ -35,6 +35,7 @@ def _get_column_metadata(self, resp): return columns def _recursive_row_search(self, row, output, categories): + header = None row_group = row.get("Rows") if "ColData" in list(row.keys()): # Write the row @@ -47,19 +48,23 @@ def _recursive_row_search(self, row, output, categories): elif row_group is None or row_group == {}: pass else: - row_array = row_group.get("Row") + # row_array = row_group.get("Row") header = row.get("Header") - summary = row.get("Summary") if header is not None: categories.append(header.get("ColData")[0].get("value")) - if summary is not None: - categories.append(summary.get("ColData")[0].get("value")) - for row in row_array: - self._recursive_row_search(row, output, categories) - if header is not None: - self._recursive_row_search(header, output, categories) - if summary is not None: - self._recursive_row_search(summary, output, categories) + for key, row_item in row.items(): + if isinstance(row_item, str): + continue + if "ColData" in list(row_item.keys()): + self._recursive_row_search(row_item, output, categories) + elif "Row" in list(row_item.keys()): + for sub_row in row_item["Row"]: + self._recursive_row_search(sub_row, output, categories) + elif isinstance(row_item.get(key), dict): + if key in row_item: + self._recursive_row_search( + row_item[key], output, categories + ) if header is not None: categories.pop() From ac60cd6d5c35644d1db77f1cf7213d5053655ac3 Mon 
Sep 17 00:00:00 2001 From: Adil Ahmed Date: Sat, 25 Nov 2023 02:34:27 +0500 Subject: [PATCH 12/21] Monthly report support for ProfitAndLossDetailreport stream --- tap_quickbooks/__init__.py | 260 ++++++++++-------- tap_quickbooks/quickbooks/__init__.py | 3 + .../ProfitAndLossDetailReport.py | 30 +- 3 files changed, 172 insertions(+), 121 deletions(-) diff --git a/tap_quickbooks/__init__.py b/tap_quickbooks/__init__.py index 4c0d463..6597029 100644 --- a/tap_quickbooks/__init__.py +++ b/tap_quickbooks/__init__.py @@ -5,82 +5,91 @@ import singer.utils as singer_utils from singer import metadata, metrics import tap_quickbooks.quickbooks as quickbooks -from tap_quickbooks.sync import (sync_stream, get_stream_version) +from tap_quickbooks.sync import sync_stream, get_stream_version from tap_quickbooks.quickbooks import Quickbooks from tap_quickbooks.quickbooks.exceptions import ( - TapQuickbooksException, TapQuickbooksQuotaExceededException) + TapQuickbooksException, + TapQuickbooksQuotaExceededException, +) LOGGER = singer.get_logger() REQUIRED_CONFIG_KEYS = [ - 'refresh_token', - 'client_id', - 'client_secret', - 'start_date', - 'realmId', - 'select_fields_by_default' - ] + "refresh_token", + "client_id", + "client_secret", + "start_date", + "realmId", + "select_fields_by_default", +] CONFIG = { - 'refresh_token': None, - 'client_id': None, - 'client_secret': None, - 'start_date': None, - 'include_deleted': None + "refresh_token": None, + "client_id": None, + "client_secret": None, + "start_date": None, + "include_deleted": None, } -REPLICATION_KEY="MetaData.LastUpdatedTime" +REPLICATION_KEY = "MetaData.LastUpdatedTime" + def stream_is_selected(mdata): - return mdata.get((), {}).get('selected', False) + return mdata.get((), {}).get("selected", False) + def build_state(raw_state, catalog): state = {} - for catalog_entry in catalog['streams']: - tap_stream_id = catalog_entry['tap_stream_id'] - catalog_metadata = metadata.to_map(catalog_entry['metadata']) - replication_method = catalog_metadata.get((), {}).get('replication-method') + for catalog_entry in catalog["streams"]: + tap_stream_id = catalog_entry["tap_stream_id"] + catalog_metadata = metadata.to_map(catalog_entry["metadata"]) + replication_method = catalog_metadata.get((), {}).get("replication-method") - version = singer.get_bookmark(raw_state, - tap_stream_id, - 'version') + version = singer.get_bookmark(raw_state, tap_stream_id, "version") # Preserve state that deals with resuming an incomplete bulk job - if singer.get_bookmark(raw_state, tap_stream_id, 'JobID'): - job_id = singer.get_bookmark(raw_state, tap_stream_id, 'JobID') - batches = singer.get_bookmark(raw_state, tap_stream_id, 'BatchIDs') - current_bookmark = singer.get_bookmark(raw_state, tap_stream_id, 'JobHighestBookmarkSeen') - state = singer.write_bookmark(state, tap_stream_id, 'JobID', job_id) - state = singer.write_bookmark(state, tap_stream_id, 'BatchIDs', batches) - state = singer.write_bookmark(state, tap_stream_id, 'JobHighestBookmarkSeen', current_bookmark) - - if replication_method == 'INCREMENTAL': - replication_key = catalog_metadata.get((), {}).get('replication-key') - replication_key_value = singer.get_bookmark(raw_state, - tap_stream_id, - replication_key) + if singer.get_bookmark(raw_state, tap_stream_id, "JobID"): + job_id = singer.get_bookmark(raw_state, tap_stream_id, "JobID") + batches = singer.get_bookmark(raw_state, tap_stream_id, "BatchIDs") + current_bookmark = singer.get_bookmark( + raw_state, tap_stream_id, "JobHighestBookmarkSeen" + ) + state = 
singer.write_bookmark(state, tap_stream_id, "JobID", job_id) + state = singer.write_bookmark(state, tap_stream_id, "BatchIDs", batches) + state = singer.write_bookmark( + state, tap_stream_id, "JobHighestBookmarkSeen", current_bookmark + ) + + if replication_method == "INCREMENTAL": + replication_key = catalog_metadata.get((), {}).get("replication-key") + replication_key_value = singer.get_bookmark( + raw_state, tap_stream_id, replication_key + ) if version is not None: - state = singer.write_bookmark( - state, tap_stream_id, 'version', version) + state = singer.write_bookmark(state, tap_stream_id, "version", version) if replication_key_value is not None: state = singer.write_bookmark( - state, tap_stream_id, replication_key, replication_key_value) - elif replication_method == 'FULL_TABLE' and version is None: - state = singer.write_bookmark(state, tap_stream_id, 'version', version) + state, tap_stream_id, replication_key, replication_key_value + ) + elif replication_method == "FULL_TABLE" and version is None: + state = singer.write_bookmark(state, tap_stream_id, "version", version) return state + # pylint: disable=undefined-variable def create_property_schema(field, mdata): - field_name = field['name'] + field_name = field["name"] if field_name == "Id": mdata = metadata.write( - mdata, ('properties', field_name), 'inclusion', 'automatic') + mdata, ("properties", field_name), "inclusion", "automatic" + ) else: mdata = metadata.write( - mdata, ('properties', field_name), 'inclusion', 'available') + mdata, ("properties", field_name), "inclusion", "available" + ) property_schema, mdata = quickbooks.field_to_property_schema(field, mdata) @@ -90,8 +99,8 @@ def create_property_schema(field, mdata): # pylint: disable=too-many-branches,too-many-statements def do_discover(qb): """Describes a Quickbooks instance's objects and generates a JSON schema for each field.""" - objects_to_discover = qb.describe() - key_properties = ['Id'] + objects_to_discover = qb.describe() + key_properties = ["Id"] qb_custom_setting_objects = [] object_to_tag_references = {} @@ -100,70 +109,72 @@ def do_discover(qb): entries = [] for sobject_name in objects_to_discover: - fields = qb.describe(sobject_name) replication_key = REPLICATION_KEY - if sobject_name.endswith('Report'): + if sobject_name.endswith("Report"): replication_key = None - properties = {} mdata = metadata.new() # Loop over the object's fields for f in fields: - field_name = f['name'] + field_name = f["name"] - property_schema, mdata = create_property_schema( - f, mdata) + property_schema, mdata = create_property_schema(f, mdata) - inclusion = metadata.get( - mdata, ('properties', field_name), 'inclusion') + inclusion = metadata.get(mdata, ("properties", field_name), "inclusion") if qb.select_fields_by_default: mdata = metadata.write( - mdata, ('properties', field_name), 'selected-by-default', True) + mdata, ("properties", field_name), "selected-by-default", True + ) properties[field_name] = property_schema if replication_key: mdata = metadata.write( - mdata, ('properties', replication_key), 'inclusion', 'automatic') + mdata, ("properties", replication_key), "inclusion", "automatic" + ) if replication_key: mdata = metadata.write( - mdata, (), 'valid-replication-keys', [replication_key]) + mdata, (), "valid-replication-keys", [replication_key] + ) else: mdata = metadata.write( mdata, (), - 'forced-replication-method', + "forced-replication-method", { - 'replication-method': 'FULL_TABLE', - 'reason': 'No replication keys found from the Quickbooks API'}) - if 
sobject_name in ["GeneralLedgerCashReport","GeneralLedgerAccrualReport"]: + "replication-method": "FULL_TABLE", + "reason": "No replication keys found from the Quickbooks API", + }, + ) + if sobject_name in ["GeneralLedgerCashReport", "GeneralLedgerAccrualReport"]: key_properties = [] - mdata = metadata.write(mdata, (), 'table-key-properties', key_properties) + mdata = metadata.write(mdata, (), "table-key-properties", key_properties) schema = { - 'type': 'object', - 'additionalProperties': False, - 'properties': properties + "type": "object", + "additionalProperties": False, + "properties": properties, } entry = { - 'stream': sobject_name, - 'tap_stream_id': sobject_name, - 'schema': schema, - 'metadata': metadata.to_list(mdata) + "stream": sobject_name, + "tap_stream_id": sobject_name, + "schema": schema, + "metadata": metadata.to_list(mdata), } entries.append(entry) - result = {'streams': entries} + result = {"streams": entries} json.dump(result, sys.stdout, indent=4) + def do_sync(qb, catalog, state, state_passed): starting_stream = state.get("current_stream") @@ -174,16 +185,17 @@ def do_sync(qb, catalog, state, state_passed): for catalog_entry in catalog["streams"]: stream_version = get_stream_version(catalog_entry, state) - stream = catalog_entry['stream'] - stream_alias = catalog_entry.get('stream_alias') + stream = catalog_entry["stream"] + stream_alias = catalog_entry.get("stream_alias") stream_name = catalog_entry["tap_stream_id"] activate_version_message = singer.ActivateVersionMessage( - stream=(stream_alias or stream), version=stream_version) + stream=(stream_alias or stream), version=stream_version + ) - catalog_metadata = metadata.to_map(catalog_entry['metadata']) - replication_key = catalog_metadata.get((), {}).get('replication-key') + catalog_metadata = metadata.to_map(catalog_entry["metadata"]) + replication_key = catalog_metadata.get((), {}).get("replication-key") - mdata = metadata.to_map(catalog_entry['metadata']) + mdata = metadata.to_map(catalog_entry["metadata"]) if not stream_is_selected(mdata): LOGGER.info("%s: Skipping - not selected", stream_name) @@ -201,15 +213,20 @@ def do_sync(qb, catalog, state, state_passed): state["current_stream"] = stream_name singer.write_state(state) - key_properties = metadata.to_map(catalog_entry['metadata']).get((), {}).get('table-key-properties') + key_properties = ( + metadata.to_map(catalog_entry["metadata"]) + .get((), {}) + .get("table-key-properties") + ) singer.write_schema( stream, - catalog_entry['schema'], + catalog_entry["schema"], key_properties, replication_key, - stream_alias) + stream_alias, + ) - job_id = singer.get_bookmark(state, catalog_entry['tap_stream_id'], 'JobID') + job_id = singer.get_bookmark(state, catalog_entry["tap_stream_id"], "JobID") if job_id: with metrics.record_counter(stream) as counter: # Remove Job info from state once we complete this resumed query. One of a few cases could have occurred: @@ -217,30 +234,41 @@ def do_sync(qb, catalog, state, state_passed): # 2. The job partially completed, in which case make JobHighestBookmarkSeen the new bookmark, or # existing bookmark if no bookmark exists for the Job. # 3. 
The job completely failed, in which case maintain the existing bookmark, or None if no bookmark - state.get('bookmarks', {}).get(catalog_entry['tap_stream_id'], {}).pop('JobID', None) - state.get('bookmarks', {}).get(catalog_entry['tap_stream_id'], {}).pop('BatchIDs', None) - bookmark = state.get('bookmarks', {}).get(catalog_entry['tap_stream_id'], {}) \ - .pop('JobHighestBookmarkSeen', None) - existing_bookmark = state.get('bookmarks', {}).get(catalog_entry['tap_stream_id'], {}) \ - .pop(replication_key, None) + state.get("bookmarks", {}).get(catalog_entry["tap_stream_id"], {}).pop( + "JobID", None + ) + state.get("bookmarks", {}).get(catalog_entry["tap_stream_id"], {}).pop( + "BatchIDs", None + ) + bookmark = ( + state.get("bookmarks", {}) + .get(catalog_entry["tap_stream_id"], {}) + .pop("JobHighestBookmarkSeen", None) + ) + existing_bookmark = ( + state.get("bookmarks", {}) + .get(catalog_entry["tap_stream_id"], {}) + .pop(replication_key, None) + ) state = singer.write_bookmark( state, - catalog_entry['tap_stream_id'], + catalog_entry["tap_stream_id"], replication_key, - bookmark or existing_bookmark) # If job is removed, reset to existing bookmark or None + bookmark or existing_bookmark, + ) # If job is removed, reset to existing bookmark or None singer.write_state(state) else: # Tables with a replication_key or an empty bookmark will emit an # activate_version at the beginning of their sync - bookmark_is_empty = state.get('bookmarks', {}).get( - catalog_entry['tap_stream_id']) is None + bookmark_is_empty = ( + state.get("bookmarks", {}).get(catalog_entry["tap_stream_id"]) is None + ) if replication_key or bookmark_is_empty: singer.write_message(activate_version_message) - state = singer.write_bookmark(state, - catalog_entry['tap_stream_id'], - 'version', - stream_version) + state = singer.write_bookmark( + state, catalog_entry["tap_stream_id"], "version", stream_version + ) counter = sync_stream(qb, catalog_entry, state, state_passed) LOGGER.info("%s: Completed sync (%s rows)", stream_name, counter.value) @@ -248,6 +276,7 @@ def do_sync(qb, catalog, state, state_passed): singer.write_state(state) LOGGER.info("Finished sync") + def main_impl(): args = singer_utils.parse_args(REQUIRED_CONFIG_KEYS) @@ -257,24 +286,25 @@ def main_impl(): qb = None try: qb = Quickbooks( - refresh_token=CONFIG['refresh_token'], - qb_client_id=CONFIG['client_id'], - qb_client_secret=CONFIG['client_secret'], - quota_percent_total=CONFIG.get('quota_percent_total'), - quota_percent_per_run=CONFIG.get('quota_percent_per_run'), - is_sandbox=CONFIG.get('is_sandbox'), - select_fields_by_default=CONFIG.get('select_fields_by_default'), - default_start_date=CONFIG.get('start_date'), - include_deleted = CONFIG.get('include_deleted'), - api_type='REST', - realm_id = CONFIG.get('realmId'), - report_period_days = CONFIG.get('report_period_days'), - gl_full_sync = CONFIG.get('gl_full_sync'), - gl_weekly = CONFIG.get('gl_weekly', False), - gl_daily = CONFIG.get('gl_daily', False), - gl_basic_fields = CONFIG.get('gl_basic_fields', False), - pnl_adjusted_gain_loss = CONFIG.get('pnl_adjusted_gain_loss', False) - ) + refresh_token=CONFIG["refresh_token"], + qb_client_id=CONFIG["client_id"], + qb_client_secret=CONFIG["client_secret"], + quota_percent_total=CONFIG.get("quota_percent_total"), + quota_percent_per_run=CONFIG.get("quota_percent_per_run"), + is_sandbox=CONFIG.get("is_sandbox"), + select_fields_by_default=CONFIG.get("select_fields_by_default"), + default_start_date=CONFIG.get("start_date"), + 
include_deleted=CONFIG.get("include_deleted"), + api_type="REST", + realm_id=CONFIG.get("realmId"), + report_period_days=CONFIG.get("report_period_days"), + gl_full_sync=CONFIG.get("gl_full_sync"), + gl_weekly=CONFIG.get("gl_weekly", False), + gl_daily=CONFIG.get("gl_daily", False), + gl_basic_fields=CONFIG.get("gl_basic_fields", False), + pnl_adjusted_gain_loss=CONFIG.get("pnl_adjusted_gain_loss", False), + pnl_monthly=CONFIG.get("pnl_monthly", False), + ) qb.login() if args.discover: @@ -289,7 +319,8 @@ def main_impl(): if qb.rest_requests_attempted > 0: LOGGER.debug( "This job used %s REST requests towards the Quickbooks quota.", - qb.rest_requests_attempted) + qb.rest_requests_attempted, + ) if qb.login_timer: qb.login_timer.cancel() @@ -307,5 +338,6 @@ def main(): LOGGER.critical(e) raise e + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 5c9075e..dc49070 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -232,6 +232,7 @@ def __init__( gl_basic_fields=None, realm_id=None, pnl_adjusted_gain_loss=None, + pnl_monthly=None, ): self.api_type = api_type.upper() if api_type else None self.report_period_days = report_period_days @@ -248,6 +249,7 @@ def __init__( self.session = requests.Session() self.access_token = None self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss + self.pnl_monthly = pnl_monthly self.base_url = ( "https://sandbox-quickbooks.api.intuit.com/v3/company/" @@ -536,5 +538,6 @@ def query_report(self, catalog_entry, state, state_passed): start_date, state_passed, pnl_adjusted_gain_loss=self.pnl_adjusted_gain_loss, + pnl_monthly=self.pnl_monthly, ) return reader.sync(catalog_entry) diff --git a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py index 23b1a73..be3e4a8 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ProfitAndLossDetailReport.py @@ -6,6 +6,7 @@ from tap_quickbooks.quickbooks.rest_reports import QuickbooksStream from tap_quickbooks.sync import transform_data_hook from dateutil.parser import parse +import calendar LOGGER = singer.get_logger() NUMBER_OF_PERIODS = 3 @@ -18,11 +19,19 @@ class ProfitAndLossDetailReport(QuickbooksStream): replication_method: ClassVar[str] = "FULL_TABLE" current_account = {} - def __init__(self, qb, start_date, state_passed, pnl_adjusted_gain_loss=None): + def __init__( + self, + qb, + start_date, + state_passed, + pnl_adjusted_gain_loss=None, + pnl_monthly=None, + ): self.qb = qb self.start_date = start_date self.state_passed = state_passed self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss + self.pnl_monthly = pnl_monthly def _get_column_metadata(self, resp): columns = [] @@ -62,6 +71,10 @@ def _recursive_row_search(self, row, output, categories): if header is not None: categories.pop() + def get_days_in_month(self, start_date): + _, days_in_month = calendar.monthrange(start_date.year, start_date.month) + return days_in_month - 1 + def sync(self, catalog_entry): full_sync = not self.state_passed @@ -107,6 +120,9 @@ def sync(self, catalog_entry): start_date = self.start_date.date() delta = 364 + if self.pnl_monthly: + delta = self.get_days_in_month(start_date) + while start_date < datetime.date.today(): LOGGER.info(f"Starting full sync of P&L") end_date = start_date + datetime.timedelta(delta) @@ 
-121,17 +137,17 @@ def sync(self, catalog_entry): } if self.pnl_adjusted_gain_loss: params.update({"adjusted_gain_loss": "true"}) - #Don't send columns with this param + # Don't send columns with this param del params["columns"] LOGGER.info( f"Fetch Journal Report for period {params['start_date']} to {params['end_date']}" ) - LOGGER.info( - f"Fetch Report with params {params}" - ) + LOGGER.info(f"Fetch Report with params {params}") resp = self._get(report_entity="ProfitAndLossDetail", params=params) start_date = end_date + datetime.timedelta(1) + if self.pnl_monthly: + delta = self.get_days_in_month(start_date) # Get column metadata. columns = self._get_column_metadata(resp) @@ -209,7 +225,7 @@ def sync(self, catalog_entry): } if self.pnl_adjusted_gain_loss: params.update({"adjusted_gain_loss": "true"}) - #Don't send columns with this param + # Don't send columns with this param del params["columns"] LOGGER.info( @@ -264,7 +280,7 @@ def sync(self, catalog_entry): cleansed_row["SyncTimestampUtc"] = singer.utils.strftime( singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ" ) - + if cleansed_row.get("Date"): try: cleansed_row["Date"] = parse(cleansed_row["Date"]) From 7e5b0135509c3d65216f8f90c56872861806a048 Mon Sep 17 00:00:00 2001 From: Vinicius Mesel Date: Thu, 30 Nov 2023 12:22:06 -0300 Subject: [PATCH 13/21] Adds new flags for P/L Detail Report and Monthly Balance Sheet (#79) Co-authored-by: Hassan Syyid --- .DS_Store | Bin 8196 -> 0 bytes .gitignore | 1 + tap_quickbooks/__init__.py | 35 +++++++------ tap_quickbooks/quickbooks/__init__.py | 47 ++++++++++-------- .../MonthlyBalanceSheetReport.py | 10 ++-- .../ProfitAndLossDetailReport.py | 2 +- 6 files changed, 49 insertions(+), 46 deletions(-) delete mode 100644 .DS_Store diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index e32bfdc5cbf3d5772f5956fccaeeb3c9f5e05aaa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 Date: Fri, 8 Dec 2023 16:27:44 -0500 Subject: [PATCH 14/21] fix offset calculation --- tap_quickbooks/quickbooks/rest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tap_quickbooks/quickbooks/rest.py b/tap_quickbooks/quickbooks/rest.py index 0209ded..d30c1db 100644 --- a/tap_quickbooks/quickbooks/rest.py +++ b/tap_quickbooks/quickbooks/rest.py @@ -99,8 +99,9 @@ def _sync_records(self, url, headers, params, stream): headers["Content-Type"]
= "application/json" query = params['query'] - offset = 0; + offset = 0 max = 100 + page = 0 while True: headers.update(self.qb._get_standard_headers()) records_deleted = [] @@ -128,6 +129,7 @@ def _sync_records(self, url, headers, params, stream): if count == 0: break; + page += 1 records = resp_json['QueryResponse'][stream]; records = records + records_deleted @@ -138,4 +140,4 @@ def _sync_records(self, url, headers, params, stream): if count < max: break; - offset += max + offset = (max * page) + 1 From 06757ea96568e0cf2fae55029100eda59f3d133d Mon Sep 17 00:00:00 2001 From: xacadil <92389481+xacadil@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:18:11 +0500 Subject: [PATCH 15/21] Response exception message and handling. (#100) --- tap_quickbooks/quickbooks/rest.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/tap_quickbooks/quickbooks/rest.py b/tap_quickbooks/quickbooks/rest.py index 761f084..9b23604 100644 --- a/tap_quickbooks/quickbooks/rest.py +++ b/tap_quickbooks/quickbooks/rest.py @@ -63,17 +63,18 @@ def _query_recur( yield record except HTTPError as ex: - response = ex.response.json() - if isinstance(response, list) and response[0].get("errorCode") == "QUERY_TIMEOUT": - start_date = singer_utils.strptime_with_tz(start_date_str) - day_range = (end_date - start_date).days - LOGGER.info( - "Quickbooks returned QUERY_TIMEOUT querying %d days of %s", - day_range, - catalog_entry['stream']) - retryable = True - else: - raise ex + try: + response = ex.response.json() + if isinstance(response, list) and response[0].get("errorCode") == "QUERY_TIMEOUT": + start_date = singer_utils.strptime_with_tz(start_date_str) + day_range = (end_date - start_date).days + LOGGER.info( + "Quickbooks returned QUERY_TIMEOUT querying %d days of %s", + day_range, + catalog_entry['stream']) + retryable = True + except: + raise ex if retryable: start_date = singer_utils.strptime_with_tz(start_date_str) From e49a6b483e8a0a7005d64af6899f8827a43bf1ca Mon Sep 17 00:00:00 2001 From: xacadil <92389481+xacadil@users.noreply.github.com> Date: Thu, 16 May 2024 02:38:14 +0500 Subject: [PATCH 16/21] Additional logs. (#103) --- tap_quickbooks/quickbooks/__init__.py | 3 ++- tap_quickbooks/quickbooks/rest.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 8f58e7b..b218a5c 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -487,9 +487,10 @@ def _build_query_string( # order_by = " ORDERBY {} ASC".format(replication_key) # if order_by_clause: # return query + where_clause + end_date_clause + order_by - + LOGGER.info(f"Executing query {query + where_clause + end_date_clause}") return query + where_clause + end_date_clause else: + LOGGER.info(f"Executing query {query}") return query def query(self, catalog_entry, state, state_passed): diff --git a/tap_quickbooks/quickbooks/rest.py b/tap_quickbooks/quickbooks/rest.py index 9b23604..90d38ad 100644 --- a/tap_quickbooks/quickbooks/rest.py +++ b/tap_quickbooks/quickbooks/rest.py @@ -125,6 +125,9 @@ def _sync_records(self, url, headers, params, stream): # Establish number of records returned. count = resp_json['QueryResponse'].get('maxResults', 0) + LOGGER.info( + f"Synced {count} records for URL: {resp.request.url}" + ) # Make sure there is alteast one record. 
if count == 0: From df74975ebd918388d3e9247eb98cde770e94f896 Mon Sep 17 00:00:00 2001 From: xacadil <92389481+xacadil@users.noreply.github.com> Date: Wed, 3 Jul 2024 10:30:59 -0600 Subject: [PATCH 17/21] Suppliers for GeneralLedgerAccrualReport stream (#108) --- tap_quickbooks/quickbooks/schemas/object_definition.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tap_quickbooks/quickbooks/schemas/object_definition.json b/tap_quickbooks/quickbooks/schemas/object_definition.json index 0159001..ec3f1de 100644 --- a/tap_quickbooks/quickbooks/schemas/object_definition.json +++ b/tap_quickbooks/quickbooks/schemas/object_definition.json @@ -505,7 +505,9 @@ {"name": "KlassId", "type": "string"}, {"name": "Class", "type": "string"}, {"name": "ClassId", "type": "string"}, - {"name": "Categories", "type": "array", "child_type": "string"} + {"name": "Categories", "type": "array", "child_type": "string"}, + {"name": "SupplierId", "type": "string"}, + {"name": "Supplier", "type": "string"} ], "GeneralLedgerCashReport": [ {"name": "Date", "type": "string"}, From 0b77c81a328da6cc076d45d5cc21480e7203f619 Mon Sep 17 00:00:00 2001 From: Keyna Rafael <95432445+keyn4@users.noreply.github.com> Date: Mon, 22 Jul 2024 14:13:08 -0500 Subject: [PATCH 18/21] add aging_method and report_date to aging report (#110) --- tap_quickbooks/__init__.py | 1 + tap_quickbooks/quickbooks/__init__.py | 5 ++++- .../quickbooks/reportstreams/ARAgingSummaryReport.py | 11 ++++++++++- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/tap_quickbooks/__init__.py b/tap_quickbooks/__init__.py index dace4b2..526c5cc 100644 --- a/tap_quickbooks/__init__.py +++ b/tap_quickbooks/__init__.py @@ -307,6 +307,7 @@ def main_impl(): gl_basic_fields = CONFIG.get('gl_basic_fields', False), pnl_adjusted_gain_loss=CONFIG.get("pnl_adjusted_gain_loss", False), pnl_monthly=CONFIG.get("pnl_monthly", False), + ar_aging_report_date=CONFIG.get("ar_aging_report_date", False), ) qb.login() diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index b218a5c..c321267 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -230,7 +230,9 @@ def __init__(self, gl_basic_fields = None, realm_id=None, pnl_adjusted_gain_loss=None, - pnl_monthly=None): + pnl_monthly=None, + ar_aging_report_date=None, + ): self.api_type = api_type.upper() if api_type else None self.report_period_days = report_period_days self.gl_full_sync = gl_full_sync @@ -250,6 +252,7 @@ def __init__(self, self.access_token = None self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss self.pnl_monthly = pnl_monthly + self.ar_aging_report_date = ar_aging_report_date self.base_url = ( "https://sandbox-quickbooks.api.intuit.com/v3/company/" diff --git a/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py b/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py index 0684c94..3c94d6d 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py @@ -39,7 +39,16 @@ def sync(self, catalog_entry): "accounting_method": "Accrual" } - LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']}") + if self.qb.ar_aging_report_date: + report_date = self.qb.ar_aging_report_date.split("T")[0] + params["aging_method"] = "Report_Date" + params["report_date"] = report_date + + if self.qb.ar_aging_report_date: + LOGGER.info(f"Fetch ARAgingSummary Report for period 
{params['start_date']} to {params['end_date']} with aging_method 'Report_Date' and report_date {report_date}") + else: + LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']}") + resp = self._get(report_entity='AgedReceivables', params=params) # Get column metadata. From 70416bbdbcf24c63616b2377b938fb21cff485cb Mon Sep 17 00:00:00 2001 From: Lomash Sharma <40795579+lomashs09@users.noreply.github.com> Date: Wed, 7 Aug 2024 23:01:25 +0530 Subject: [PATCH 19/21] Add config ar_aging_report_dates for ar aging report to sync data for multiple report dates at once (#111) --------- Co-authored-by: Lomash Sharma --- tap_quickbooks/__init__.py | 1 + tap_quickbooks/quickbooks/__init__.py | 3 +- .../reportstreams/ARAgingSummaryReport.py | 95 ++++++++++--------- .../quickbooks/schemas/object_definition.json | 3 +- 4 files changed, 54 insertions(+), 48 deletions(-) diff --git a/tap_quickbooks/__init__.py b/tap_quickbooks/__init__.py index 526c5cc..06b3eef 100644 --- a/tap_quickbooks/__init__.py +++ b/tap_quickbooks/__init__.py @@ -308,6 +308,7 @@ def main_impl(): pnl_adjusted_gain_loss=CONFIG.get("pnl_adjusted_gain_loss", False), pnl_monthly=CONFIG.get("pnl_monthly", False), ar_aging_report_date=CONFIG.get("ar_aging_report_date", False), + ar_aging_report_dates=CONFIG.get("ar_aging_report_dates", False), ) qb.login() diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index c321267..6c92281 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -232,6 +232,7 @@ def __init__(self, pnl_adjusted_gain_loss=None, pnl_monthly=None, ar_aging_report_date=None, + ar_aging_report_dates=None, ): self.api_type = api_type.upper() if api_type else None self.report_period_days = report_period_days @@ -253,7 +254,7 @@ def __init__(self, self.pnl_adjusted_gain_loss = pnl_adjusted_gain_loss self.pnl_monthly = pnl_monthly self.ar_aging_report_date = ar_aging_report_date - + self.ar_aging_report_dates = ar_aging_report_dates self.base_url = ( "https://sandbox-quickbooks.api.intuit.com/v3/company/" if is_sandbox is True diff --git a/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py b/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py index 3c94d6d..520b037 100644 --- a/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py +++ b/tap_quickbooks/quickbooks/reportstreams/ARAgingSummaryReport.py @@ -1,8 +1,6 @@ import datetime from typing import ClassVar, Dict, List, Optional - import singer - from tap_quickbooks.quickbooks.rest_reports import QuickbooksStream from tap_quickbooks.sync import transform_data_hook @@ -39,56 +37,61 @@ def sync(self, catalog_entry): "accounting_method": "Accrual" } - if self.qb.ar_aging_report_date: - report_date = self.qb.ar_aging_report_date.split("T")[0] - params["aging_method"] = "Report_Date" - params["report_date"] = report_date - - if self.qb.ar_aging_report_date: - LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']} with aging_method 'Report_Date' and report_date {report_date}") + report_dates = [] + if self.qb.ar_aging_report_dates: + for report_date in self.qb.ar_aging_report_dates: + report_dates.append(report_date.split("T")[0]) + elif self.qb.ar_aging_report_date: + report_dates.append(self.qb.ar_aging_report_date.split("T")[0]) else: - LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']}") + report_dates.append(None) # This is to 
Run the sync once without specific report_date - resp = self._get(report_entity='AgedReceivables', params=params) - - # Get column metadata. - columns = self._get_column_metadata(resp) + for report_date in report_dates: + if report_date: + params["aging_method"] = "Report_Date" + params["report_date"] = report_date + LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']} with aging_method 'Report_Date' and report_date {report_date}") + else: + LOGGER.info(f"Fetch ARAgingSummary Report for period {params['start_date']} to {params['end_date']}") + resp = self._get(report_entity='AgedReceivables', params=params) - # Recursively get row data. - row_group = resp.get("Rows") - row_array = row_group.get("Row") + # Get column metadata. + columns = self._get_column_metadata(resp) - if row_array is None: - return + # Recursively get row data. + row_group = resp.get("Rows") + row_array = row_group.get("Row") - output = [] - for row in row_array: - if "Header" in row: - output.append([i.get('value') for i in row.get("Header", {}).get("ColData", [])]) + if row_array is None: + return - for subrow in row.get("Rows", {}).get("Row", []): - output.append([i.get('value') for i in subrow.get("ColData", [])]) + output = [] + for row in row_array: + if "Header" in row: + output.append([i.get('value') for i in row.get("Header", {}).get("ColData", [])]) - output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) - elif "Summary" in row: - output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) - else: - output.append([i.get('value') for i in row.get("ColData", [])]) + for subrow in row.get("Rows", {}).get("Row", []): + output.append([i.get('value') for i in subrow.get("ColData", [])]) - # Zip columns and row data. - for raw_row in output: - row = dict(zip(columns, raw_row)) - if not row.get("Total"): - # If a row is missing the amount, skip it - continue - - cleansed_row = {} - for k, v in row.items(): - if v == "": - continue + output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) + elif "Summary" in row: + output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) else: - cleansed_row.update({k: v}) - - cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") - - yield cleansed_row + output.append([i.get('value') for i in row.get("ColData", [])]) + + # Zip columns and row data. 
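# A minimal, self-contained illustration of the zip step that follows
# (hypothetical values; the column titles mirror the ARAgingSummaryReport
# schema): _get_column_metadata yields ordered titles that are paired
# positionally with each flattened row to build a record dict.
columns = ["Customer", "Current", "1-30", "31-60", "61-90", "91andover", "Total"]
raw_row = ["Acme Inc.", "100.00", "20.00", "", "", "", "120.00"]
row = dict(zip(columns, raw_row))
assert row["Total"] == "120.00"  # rows without a Total are skipped below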
+ for raw_row in output: + row = dict(zip(columns, raw_row)) + row["report_date"] = report_date if report_date else end_date.strftime("%Y-%m-%d") + if not row.get("Total"): + # If a row is missing the amount, skip it + continue + + cleansed_row = {} + for k, v in row.items(): + if v == "": + continue + else: + cleansed_row.update({k: v}) + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + yield cleansed_row diff --git a/tap_quickbooks/quickbooks/schemas/object_definition.json b/tap_quickbooks/quickbooks/schemas/object_definition.json index ec3f1de..c5fa53c 100644 --- a/tap_quickbooks/quickbooks/schemas/object_definition.json +++ b/tap_quickbooks/quickbooks/schemas/object_definition.json @@ -563,7 +563,8 @@ {"name": "31-60", "type": "number"}, {"name": "61-90", "type": "number"}, {"name": "91andover", "type": "number"}, - {"name": "Total", "type": "number"} + {"name": "Total", "type": "number"}, + {"name": "report_date", "type": "string"} ], "ProfitAndLossDetailReport": [ {"name": "Date", "type": "string"}, From 14c3ff7a2659a48744b3232537871a9ff4a19781 Mon Sep 17 00:00:00 2001 From: Lomash Sharma <40795579+lomashs09@users.noreply.github.com> Date: Thu, 15 Aug 2024 03:37:17 +0530 Subject: [PATCH 20/21] Add stream AR Aging Detail report (#113) --------- Co-authored-by: Lomash Sharma --- tap_quickbooks/quickbooks/__init__.py | 3 + .../reportstreams/ArAgingDetailReport.py | 93 +++++++++++++++++++ .../quickbooks/schemas/object_definition.json | 11 +++ 3 files changed, 107 insertions(+) create mode 100644 tap_quickbooks/quickbooks/reportstreams/ArAgingDetailReport.py diff --git a/tap_quickbooks/quickbooks/__init__.py b/tap_quickbooks/quickbooks/__init__.py index 6c92281..06f958d 100644 --- a/tap_quickbooks/quickbooks/__init__.py +++ b/tap_quickbooks/quickbooks/__init__.py @@ -31,6 +31,7 @@ from tap_quickbooks.quickbooks.reportstreams.MonthlyCashFlowReport import MonthlyCashFlowReport from tap_quickbooks.quickbooks.reportstreams.TransactionListReport import TransactionListReport from tap_quickbooks.quickbooks.reportstreams.ARAgingSummaryReport import ARAgingSummaryReport +from tap_quickbooks.quickbooks.reportstreams.ArAgingDetailReport import ARAgingDetailReport from tap_quickbooks.quickbooks.rest import Rest from tap_quickbooks.quickbooks.exceptions import ( @@ -537,6 +538,8 @@ def query_report(self, catalog_entry, state, state_passed): reader = MonthlyCashFlowReport(self, start_date, state_passed) elif catalog_entry["stream"] == "ARAgingSummaryReport": reader = ARAgingSummaryReport(self, start_date, state_passed) + elif catalog_entry["stream"] == "ARAgingDetailReport": + reader = ARAgingDetailReport(self, start_date, state_passed) elif catalog_entry["stream"] == "TransactionListReport": reader = TransactionListReport(self, start_date, state_passed) else: diff --git a/tap_quickbooks/quickbooks/reportstreams/ArAgingDetailReport.py b/tap_quickbooks/quickbooks/reportstreams/ArAgingDetailReport.py new file mode 100644 index 0000000..a9a1570 --- /dev/null +++ b/tap_quickbooks/quickbooks/reportstreams/ArAgingDetailReport.py @@ -0,0 +1,93 @@ +import datetime +from typing import ClassVar, Dict, List, Optional +import singer +from tap_quickbooks.quickbooks.rest_reports import QuickbooksStream +from tap_quickbooks.sync import transform_data_hook + +LOGGER = singer.get_logger() +NUMBER_OF_PERIODS = 3 + +class ARAgingDetailReport(QuickbooksStream): + tap_stream_id: ClassVar[str] = 'ARAgingDetailReport' + stream: ClassVar[str] = 'ARAgingDetailReport' + 
key_properties: ClassVar[List[str]] = [] + replication_method: ClassVar[str] = 'FULL_TABLE' + + def __init__(self, qb, start_date, state_passed): + self.qb = qb + self.start_date = start_date + self.state_passed = state_passed + + def _get_column_metadata(self, resp): + columns = [] + for column in resp.get("Columns").get("Column"): + if column.get("ColTitle") == "" and column.get("ColType") == "Customer": + columns.append("Customer") + else: + columns.append(column.get("ColTitle").replace(" ", "")) + return columns + + def sync(self, catalog_entry): + LOGGER.info(f"Starting full sync of ARAgingDetail") + end_date = datetime.date.today() + start_date = self.start_date + params = { + "start_date": start_date.strftime("%Y-%m-%d"), + "end_date": end_date.strftime("%Y-%m-%d"), + "accounting_method": "Accrual" + } + + report_dates = [] + if self.qb.ar_aging_report_dates: + for report_date in self.qb.ar_aging_report_dates: + report_dates.append(report_date.split("T")[0]) + elif self.qb.ar_aging_report_date: + report_dates.append(self.qb.ar_aging_report_date.split("T")[0]) + else: + report_dates.append(None) # This is to Run the sync once without specific report_date + + for report_date in report_dates: + if report_date: + params["aging_method"] = "Report_Date" + params["report_date"] = report_date + LOGGER.info(f"Fetch ARAgingDetail Report for period {params['start_date']} to {params['end_date']} with aging_method 'Report_Date' and report_date {report_date}") + else: + LOGGER.info(f"Fetch ARAgingDetail Report for period {params['start_date']} to {params['end_date']}") + resp = self._get(report_entity='AgedReceivableDetail', params=params) + + # Get column metadata. + columns = self._get_column_metadata(resp) + + # Recursively get row data. + row_group = resp.get("Rows") + row_array = row_group.get("Row") + if row_array is None: + return + + output = [] + for row in row_array: + if "Header" in row: + output.append([i.get('value') for i in row.get("Header", {}).get("ColData", [])]) + + for subrow in row.get("Rows", {}).get("Row", []): + output.append([i.get('value') for i in subrow.get("ColData", [])]) + + output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) + elif "Summary" in row: + output.append([i.get('value') for i in row.get("Summary", {}).get("ColData", [])]) + else: + output.append([i.get('value') for i in row.get("ColData", [])]) + + # Zip columns and row data. 
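# A minimal sketch of the stamping and cleansing performed below (hypothetical
# helper, not the tap's API): each detail row records the report_date it was
# fetched for, falling back to the sync's end_date when no aging date was
# configured, and empty-string cells are dropped before the row is emitted.
from typing import Optional

def stamp_and_cleanse(row: dict, report_date: Optional[str], end_date_str: str) -> dict:
    # fall back to the run's end_date when the sync ran without a report_date
    row["report_date"] = report_date if report_date else end_date_str
    # drop empty-string cells, mirroring the cleansed_row loop below
    return {k: v for k, v in row.items() if v != ""}

# stamp_and_cleanse({"Customer": "Acme", "Num": ""}, None, "2024-08-14")
# -> {"Customer": "Acme", "report_date": "2024-08-14"}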
+ for raw_row in output: + row = dict(zip(columns, raw_row)) + row["report_date"] = report_date if report_date else end_date.strftime("%Y-%m-%d") + + cleansed_row = {} + for k, v in row.items(): + if v == "": + continue + else: + cleansed_row.update({k: v}) + cleansed_row["SyncTimestampUtc"] = singer.utils.strftime(singer.utils.now(), "%Y-%m-%dT%H:%M:%SZ") + yield cleansed_row diff --git a/tap_quickbooks/quickbooks/schemas/object_definition.json b/tap_quickbooks/quickbooks/schemas/object_definition.json index c5fa53c..27545bf 100644 --- a/tap_quickbooks/quickbooks/schemas/object_definition.json +++ b/tap_quickbooks/quickbooks/schemas/object_definition.json @@ -566,6 +566,17 @@ {"name": "Total", "type": "number"}, {"name": "report_date", "type": "string"} ], + + "ARAgingDetailReport": [ + {"name": "Date", "type": "string"}, + {"name": "TransactionType", "type": "string"}, + {"name": "Num", "type": "string"}, + {"name": "Customer", "type": "string"}, + {"name": "DueDate", "type": "string"}, + {"name": "Amount", "type": "number"}, + {"name": "OpenBalance", "type": "number"}, + {"name": "report_date", "type": "string"} + ], "ProfitAndLossDetailReport": [ {"name": "Date", "type": "string"}, {"name": "TransactionType", "type": "string"}, From 39a2e4c892a0208398788f75a848f0ec5dd49890 Mon Sep 17 00:00:00 2001 From: Lomash Sharma <40795579+lomashs09@users.noreply.github.com> Date: Fri, 20 Sep 2024 17:54:57 +0530 Subject: [PATCH 21/21] Add column number in araging detailed report (#117) Co-authored-by: Lomash Sharma --- tap_quickbooks/quickbooks/schemas/object_definition.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tap_quickbooks/quickbooks/schemas/object_definition.json b/tap_quickbooks/quickbooks/schemas/object_definition.json index 27545bf..cce46a8 100644 --- a/tap_quickbooks/quickbooks/schemas/object_definition.json +++ b/tap_quickbooks/quickbooks/schemas/object_definition.json @@ -566,11 +566,11 @@ {"name": "Total", "type": "number"}, {"name": "report_date", "type": "string"} ], - "ARAgingDetailReport": [ {"name": "Date", "type": "string"}, {"name": "TransactionType", "type": "string"}, {"name": "Num", "type": "string"}, + {"name": "No.", "type": "string"}, {"name": "Customer", "type": "string"}, {"name": "DueDate", "type": "string"}, {"name": "Amount", "type": "number"},