diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4344a1405..4e94f18cc 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,7 +29,7 @@ jobs: if_false: 'hotfix' - name: Checkout - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: get-npm-version id: package-version @@ -46,7 +46,7 @@ jobs: - name: Authenticate to Google Cloud id: auth - uses: google-github-actions/auth@62cf5bd3e4211a0a0b51f2c6d6a37129d828611d # v2.1.5 + uses: google-github-actions/auth@6fc4af4b145ae7821d527454aa9bd537d1f2dc5f # v2.1.7 with: workload_identity_provider: 'projects/101730247931/locations/global/workloadIdentityPools/hedera-registry-pool/providers/hedera-registry-gh-actions' service_account: 'guardian-publisher@hedera-registry.iam.gserviceaccount.com' diff --git a/Methodology Library/CDM/CDM ACM0006/readme.md b/Methodology Library/CDM/CDM ACM0006/readme.md index 5eb192aa8..ee3a3731c 100644 --- a/Methodology Library/CDM/CDM ACM0006/readme.md +++ b/Methodology Library/CDM/CDM ACM0006/readme.md @@ -60,7 +60,7 @@ In the modern landscape of emission reduction initiatives, the value of transpar ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp.IPFS timestamp is coming soon. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp.IPFS timestamp is coming soon. 
### Available Roles diff --git a/Methodology Library/CDM/CDM ACM0007/readme.md b/Methodology Library/CDM/CDM ACM0007/readme.md index 3550903db..64e0cc350 100644 --- a/Methodology Library/CDM/CDM ACM0007/readme.md +++ b/Methodology Library/CDM/CDM ACM0007/readme.md @@ -53,7 +53,7 @@ ACM0007 thus enables a practical solution to reduce grid emissions intensity in ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. Timestamp: 1706881469.628524368 diff --git a/Methodology Library/CDM/CDM AMS-I.E/readme.md b/Methodology Library/CDM/CDM AMS-I.E/readme.md index dd8f6a9a9..541d60cc9 100644 --- a/Methodology Library/CDM/CDM AMS-I.E/readme.md +++ b/Methodology Library/CDM/CDM AMS-I.E/readme.md @@ -65,7 +65,7 @@ Various methodologies are used to quantify emissions reductions in cookstove pro ## Policy Import -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ## Available Roles diff --git a/Methodology Library/CDM/CDM AMS-II.G/readMe.md b/Methodology Library/CDM/CDM AMS-II.G/readMe.md index fb8fa0346..435f25788 100644 --- a/Methodology Library/CDM/CDM AMS-II.G/readMe.md +++ b/Methodology Library/CDM/CDM AMS-II.G/readMe.md @@ -56,7 +56,7 @@ AMS-II.G provides a standardized and measurable framework for clean cooking proj ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. 
Policy: 1706881318.979259003 diff --git a/Methodology Library/CDM/CDM AMS-III.BB/AMS-III.BB.policy b/Methodology Library/CDM/CDM AMS-III.BB/AMS-III.BB.policy index 8e151a91a..3fa4b5b34 100644 Binary files a/Methodology Library/CDM/CDM AMS-III.BB/AMS-III.BB.policy and b/Methodology Library/CDM/CDM AMS-III.BB/AMS-III.BB.policy differ diff --git a/Methodology Library/CDM/CDM AMS-III.BB/readme.md b/Methodology Library/CDM/CDM AMS-III.BB/readme.md index d42cdbeb8..bf8c8a28c 100644 --- a/Methodology Library/CDM/CDM AMS-III.BB/readme.md +++ b/Methodology Library/CDM/CDM AMS-III.BB/readme.md @@ -106,7 +106,7 @@ Coming Soon ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. Policy: diff --git a/Methodology Library/CDM/CDM AMS-III.D/readme.md b/Methodology Library/CDM/CDM AMS-III.D/readme.md index 8214505c8..272fbb2dc 100644 --- a/Methodology Library/CDM/CDM AMS-III.D/readme.md +++ b/Methodology Library/CDM/CDM AMS-III.D/readme.md @@ -53,7 +53,7 @@ In the context of modern emission reduction projects, the necessity for transpar ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. 
Policy: 1719334564.410514079 diff --git a/Methodology Library/CDM/CDM AMS-III.F/readme.md b/Methodology Library/CDM/CDM AMS-III.F/readme.md index d3bedaac7..94407772a 100644 --- a/Methodology Library/CDM/CDM AMS-III.F/readme.md +++ b/Methodology Library/CDM/CDM AMS-III.F/readme.md @@ -62,7 +62,7 @@ AMS-III.F's emphasis on controlled aerobic treatment of organic waste, notably t ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp = 1719334768.460755003 +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp = 1719334768.460755003 ### Available Roles diff --git a/Methodology Library/CDM/CDM AMS-III.H/readme.md b/Methodology Library/CDM/CDM AMS-III.H/readme.md index 315b7013c..9c50d6d1b 100644 --- a/Methodology Library/CDM/CDM AMS-III.H/readme.md +++ b/Methodology Library/CDM/CDM AMS-III.H/readme.md @@ -50,7 +50,7 @@ In the modern landscape of emission reduction initiatives, transparency and cred ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp = 1719334963.305320577 +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp = 1719334963.305320577 ### Available Roles diff --git a/Methodology Library/GHG Methodology/GHGP Policy/GHGP Policy 2.policy b/Methodology Library/GHG Methodology/GHGP Policy/GHGP Policy 2.policy new file mode 100644 index 000000000..5705041fe Binary files /dev/null and b/Methodology Library/GHG Methodology/GHGP Policy/GHGP Policy 2.policy differ diff --git a/Methodology Library/GHG Methodology/GHGP Policy/readMe.md b/Methodology Library/GHG Methodology/GHGP Policy/readMe.md index 70203e1ca..7e51ece55 100644 --- a/Methodology Library/GHG Methodology/GHGP Policy/readMe.md +++ b/Methodology Library/GHG Methodology/GHGP Policy/readMe.md @@ -46,9 +46,9 @@ Coming Soon ## Policy 
Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. -Latest Version - 1707206253.006698003 +Latest Version - 1732046664.657263946 ### Available Roles diff --git a/Methodology Library/GHG Methodology/US Landfill/Attestation of Regulatory Compliance.txt b/Methodology Library/GHG Methodology/US Landfill/Attestation of Regulatory Compliance.txt new file mode 100644 index 000000000..487fb6624 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Attestation of Regulatory Compliance.txt @@ -0,0 +1,18 @@ +var _pp = { + "G6": "2024-05-20", + "G8": "Jane Smith", + "G9": "Director", + "G10": "Environmental Waste Management Facility", + "G11": "Environmental Waste Management Facility (EWMF) Landfill Gas Project", + "G12": "CAR001", + "G13": [ + "2011-01-01" + ], + "G14": [ + "2024-11-07" + ], + "G15": [ + "2024-03-20" + ] +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/Attestation of Title.txt b/Methodology Library/GHG Methodology/US Landfill/Attestation of Title.txt new file mode 100644 index 000000000..3606bfe42 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Attestation of Title.txt @@ -0,0 +1,21 @@ +var _pp = { + "G6": "2024-05-20", + "G8": "Jane Smith", + "G9": "Director", + "G10": "Environmental Waste Management Facility", + "G11": "Environmental Waste Management Facility (EWMF) Landfill Gas Project", + "G12": "CAR001", + "G13": [ + "ipfs://672cef9f4e8d236241e0fd81" + ], + "G14": [ + "2024-11-07" + ], + "G15": [ + "2024-03-20" + ], + "G16": [ + "2024-05-20" + ] +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/Attestation of Voluntary Implementation.txt 
b/Methodology Library/GHG Methodology/US Landfill/Attestation of Voluntary Implementation.txt new file mode 100644 index 000000000..3606bfe42 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Attestation of Voluntary Implementation.txt @@ -0,0 +1,21 @@ +var _pp = { + "G6": "2024-05-20", + "G8": "Jane Smith", + "G9": "Director", + "G10": "Environmental Waste Management Facility", + "G11": "Environmental Waste Management Facility (EWMF) Landfill Gas Project", + "G12": "CAR001", + "G13": [ + "ipfs://672cef9f4e8d236241e0fd81" + ], + "G14": [ + "2024-11-07" + ], + "G15": [ + "2024-03-20" + ], + "G16": [ + "2024-05-20" + ] +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/Emission Reductions.txt b/Methodology Library/GHG Methodology/US Landfill/Emission Reductions.txt new file mode 100644 index 000000000..3d3432e58 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Emission Reductions.txt @@ -0,0 +1,106 @@ +var _pp = { + "G6": { + "G9": 28, + "G10": 0.1, + "G11": 0, + "G13": [ + { + "G5": "Device 1", + "G8": [ + { + "G5": "monthly", + "G6": "No", + "G14": 0.5, + "G9": 272460000 + } + ], + "G19": 0.8 + } + ], + "G30": "Yes, methane was collected and destroyed at some point prior to the project.", + "G32": "Yes, the system was removed or dormant.", + "G34": "No, the system has been consistently active.", + "G35": "Yes, I will aggregate Destbase weekly or more frequently.", + "G36": "Yes, the aggregation period is consistent.", + "G39": "Yes, the project is a flare project at a closed landfill.", + "G87": "Yes, there is a non-qualifying combustion device.", + "G135": "Yes, a new destruction device is being used.", + "G43": [ + { + "G5": 0.567, + "G6": 48 + }, + { + "G5": 0.553, + "G6": 75 + }, + { + "G5": 0.581, + "G6": 21 + } + ], + "G91": [ + { + "G5": 0.567, + "G6": 48 + }, + { + "G5": 0.553, + "G6": 75 + }, + { + "G5": 0.581, + "G6": 21 + } + 
], + "G138": [ + { + "G5": 2005, + "G6": "hourly", + "G7": 900, + "G8": 1000, + "G9": 0, + "G10": 1 + }, + { + "G5": 2005, + "G6": "hourly", + "G7": 900, + "G8": 1000, + "G9": 0, + "G10": 1 + } + ] + }, + "G148": { + "G7": [ + { + "G5": "diesel", + "G6": 32500025, + "G7": 0.78 + }, + { + "G5": "coal", + "G6": 215000, + "G7": 0.98 + } + ], + "G12": 65200, + "G13": 0.8, + "G15": [ + { + "G5": "Device 1", + "G6": 20, + "G7": 0.8 + }, + { + "G5": "Device 2", + "G6": 30, + "G7": 0.8 + } + ], + "G19": 0.5, + "G20": 28 + } +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/NOVA COI.txt b/Methodology Library/GHG Methodology/US Landfill/NOVA COI.txt new file mode 100644 index 000000000..66bd5191a --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/NOVA COI.txt @@ -0,0 +1,169 @@ +var _pp = { + "G7": "John Smith", + "G8": "+1 (555) 123-4567", + "G9": "johnsmith@environmentfirst.com", + "G10": "EWMF Landfill Gas Destruction Project", + "G11": "CAR001", + "G12": "N/A", + "G13": { + "G5": "2023-06-01", + "G6": "2024-05-31" + }, + "G16": "U.S. 
Landfill Protocol Version 6.0", + "G18": "Yes", + "G19": "Yes", + "G21": [ + { + "G5": "Jane Smith", + "G6": "Director", + "G7": "+1 (555) 987-6543", + "G8": "janesmith@ewmf.com", + "G9": "123 Maple Street Rodman, NY 13682" + } + ], + "G27": [ + { + "G5": "Jane Smith", + "G6": "Director", + "G7": "+1 (555) 987-6543", + "G8": "janesmith@ewmf.com", + "G9": "123 Maple Street Rodman, NY 13682" + } + ], + "G33": [ + { + "G5": "John Doe", + "G6": "Facility Owner", + "G7": "+1 (555) 658-6985", + "G8": "johndoe@ewmf.com", + "G9": "123 Maple Street Rodman, NY 13682" + } + ], + "G39": [ + { + "G5": "N/A", + "G6": "N/A", + "G7": "N/A", + "G8": "N/A", + "G9": "N/A" + } + ], + "G45": [ + { + "G5": "Samantha James", + "G6": "SustainGuard", + "G7": "281 598-5682", + "G8": "sjames@sustainguard.com", + "G9": "123 Green Street Rodman, NY 13682" + } + ], + "G51": [ + { + "G5": "N/A", + "G6": "N/A", + "G7": "N/A", + "G8": "N/A", + "G9": "N/A" + } + ], + "G58": "05/10/2023", + "G59": "05/11/2023, 05/21/2023", + "G60": "06/20/2024", + "G61": "First verification done by your organization for this specific ", + "G63": [ + { + "G5": "Environmental Waste Management Facility", + "G6": "123 Maple Street Rodman, NY 13682" + } + ], + "G66": "No", + "G68": "Yes", + "G69": "The verification activities for this project will include a comprehensive review of all key operational and environmental data to ensure compliance with the project requirements and accurate emissions reduction reporting. Key activities planned are as follows: Staff Interviews: We will conduct interviews with project managers, operations supervisors, and data management staff to understand the procedures and practices related to emissions data collection and processing. Document and Record Review: Review facility maintenance logs and operational records to confirm equipment uptime and efficiency. 
Examine emissions reduction data, including flow meter readings and methane concentration measurements, to validate recorded reductions. Cross-check calibration records for continuous monitoring equipment to ensure equipment accuracy and maintenance. Emissions Reductions Calculation Review: Verify that emissions reductions are calculated following the CAR Protocol methodology. Recalculate emissions reductions for select reporting intervals to confirm consistency with documented data and methodology. On-Site Inspection and Equipment Verification: Inspect destruction devices (e.g., thermal oxidizers and engines) to verify operational status and proper function. Confirm that installed flow meters and gas analyzers match documentation and meet required calibration frequencies. This planned approach ensures a thorough evaluation of operational practices, data integrity, and emissions reduction calculations to provide an accurate and complete verification report.", + "G70": "N/A", + "G71": "N/A", + "G72": "CAR001", + "G73": "CAR001", + "G74": [ + "ipfs://672cecea4e8d236241e0fd7e" + ], + "G76": "No", + "G77": [], + "G86": "No", + "G87": "No", + "G88": "No", + "G89": "No", + "G90": "No", + "G91": "No", + "G92": "No", + "G94": [], + "G103": { + "G5": "John Smith", + "G6": "555-854-2525", + "G7": "johnsmith@environmentfirst.com", + "G8": "NY USA", + "G9": "N/A", + "G10": "03/20/2024-05/20/2024", + "G11": "Environment First" + }, + "G111": { + "G5": "Sam Green", + "G6": "555-625-3265", + "G7": "samgreen@environmentfirst.com", + "G8": "NY USA", + "G9": "N/A", + "G10": "03/20/2024-05/20/2024", + "G11": "Environment First" + }, + "G119": [ + { + "G5": "N/A", + "G6": "N/A", + "G7": "N/A", + "G8": "N/A", + "G9": "N/A", + "G10": "N/A", + "G11": "N/A" + } + ], + "G128": "$25,000", + "G129": "$100,000", + "G130": "$75,000", + "G131": "$50,000 / $45,000", + "G132": "10%", + "G133": "5%", + "G135": [ + { + "G5": "Not applicable", + "G6": { + "G5": "2023-06-01", + "G6": "2024-05-31" + 
}, + "G9": "03/20/2024-05/20/2024", + "G10": "$15,000", + "G11": "3%", + "G12": "Technical consultation on GHG methods" + } + ], + "G144": [ + { + "G5": "N/A", + "G6": "N/A", + "G7": "N/A", + "G8": "N/A", + "G9": "N/A" + } + ], + "G150": [ + { + "G5": "N/A", + "G6": "N/A", + "G7": "N/A", + "G8": "N/A", + "G9": "N/A" + } + ], + "G157": "Low", + "G161": [], + "G167": [] +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/Project Data Report.txt b/Methodology Library/GHG Methodology/US Landfill/Project Data Report.txt new file mode 100644 index 000000000..37a435883 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Project Data Report.txt @@ -0,0 +1,60 @@ +var _pp = { + "G6": "Environmental Waste Management Facility", + "G7": "CAR001", + "G8": "N/A", + "G9": "06/01/2023 - 05/31/2024", + "G10": "U.S. Landfill Protocol Version 6.0", + "G11": "2023 - 880 CRTs", + "G12": "This project is located at Johnson County Landfill in Rodman, New York. The baseline scenario involves uncontrolled methane emissions from landfill gas. The project activity includes capturing and destroying methane emissions via utility flares.", + "G13": "This is a new landfill methane capture project utilizing an enclosed flare with a site-specific destruction efficiency value of 99%. 
All landfill gas is destroyed onsite.", + "G14": "EWMF LLC holds legal control over the project boundary through an agreement with EWMF, Inc.", + "G15": { + "G5": "123 Main St, Rodman, NY 13682", + "G6": "2023-06-01", + "G7": { + "G5": "2023-06-01", + "G6": "2024-05-31" + }, + "G10": { + "G5": "Installation of a landfill gas collection and a new qualifying destruction device at an eligible landfill where landfill gas has never been collected and destroyed prior to the start date.", + "G6": "The project meets the standard test under Section 3.4.1 by reducing methane emissions from the landfill.", + "G7": "Not applicable.", + "G8": "The project activities are not legally mandated, and the landfill is not subject to NMOC emissions regulations." + }, + "G15": "The project complies with all local labor and safety laws.", + "G16": "The project is in compliance with all environmental laws and is designed to prevent pollutant releases.", + "G17": "New York Department of Environmental Conservation oversees project compliance. There were no non-compliance issues during the reporting period." + }, + "G29": { + "G6": [ + { + "G6": "2023", + "G7": "1,200 tons", + "G8": "10%", + "G9": "0.9", + "G10": "1,080 tons CO₂e" + } + ], + "G14": "Testing conducted by New York Environmental Services on April 10, 2023, achieving a 99% destruction efficiency.", + "G15": [ + { + "G6": "2023", + "G7": "100 tons CO₂e", + "G8": "80 tons CO₂e", + "G9": "20 tons CO₂e", + "G10": "200 tons CO₂e" + } + ], + "G22": [ + { + "G6": "2023", + "G7": "1,080 tons CO₂e", + "G8": "200 tons CO₂e", + "G9": "880 CRTs" + } + ] + }, + "G54": "All monitoring and QA/QC requirements have been met.", + "G55": "Field checks and calibrations were completed by a third-party on May 15, 2023." 
+} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/Project Submittal.txt b/Methodology Library/GHG Methodology/US Landfill/Project Submittal.txt new file mode 100644 index 000000000..826038f2e --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Project Submittal.txt @@ -0,0 +1,48 @@ +var _pp = { + "G6": "Environmental Waste Management Facility (EWMF)", + "G7": [ + "EWMF Landfill Gas Destruction Project" + ], + "G8": "CAR001", + "G9": "Version 6.0", + "G10": "First crediting period", + "G11": "Green Solutions Consulting", + "G12": "Local Environmental Authority, Municipality of Johnson County", + "G13": "Environmental Waste Management Facility (EWMF)", + "G14": "Phone: (555) 123-4567 Email: info@ewmf-usa.com", + "G15": "2024-08-15", + "G17": "2023-06-01", + "G24": "Johnson County Landfill", + "G25": "1234 Landfill Road, Johnson County, USA", + "G26": "The EWMF Landfill Gas Destruction Project captures and destroys methane gas generated at Johnson County Landfill. The project involves the installation of a gas collection and control system (GCCS) to capture landfill gas (LFG) and a high-temperature flare for methane destruction. This system is expected to reduce greenhouse gas (GHG) emissions by converting methane into CO2, a less potent GHG. This project aims to contribute to the local government’s environmental sustainability goals while generating carbon credits. The landfill is designed to accommodate municipal solid waste (MSW) and is projected to remain operational until 2030. 
EWMF's project is in line with regulatory requirements and adheres to best practices for landfill gas management.", + "G27": "Environmental Waste Management Facility (EWMF)", + "G28": "Environmental Waste Management Facility (EWMF)", + "G29": "Municipal Solid Waste (MSW) and limited construction debris", + "G30": "5 million cubic meters", + "G31": "05/10/2022", + "G32": "2005", + "G33": "Estimated to close in 2030", + "G34": { + "G5": "Yes", + "G6": "Passive flares were installed in response to local odor complaints but did not meet criteria for effective GHG destruction.", + "G7": "Yes", + "G8": "A high-efficiency candlestick flare with a destruction efficiency of 98% was installed to meet GHG reduction standards." + }, + "G39": { + "G5": "No", + "G7": "No", + "G9": "Yes", + "G10": "07/20/2023", + "G11": "28 Mg/year", + "G12": "Expected in 2025", + "G13": "No", + "G15": "Yes", + "G17": "Yes" + }, + "G53": "The project is part of EWMF's commitment to environmental responsibility and local community engagement. Regular monitoring reports will be submitted to ensure transparency and compliance with GHG reduction protocols.", + "G18": { + "G5": "2023-06-01", + "G6": "2024-05-31" + } +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/U.S. Landfill Protocol Import Ready.xlsx b/Methodology Library/GHG Methodology/US Landfill/U.S. Landfill Protocol Import Ready.xlsx new file mode 100644 index 000000000..eab6ff703 Binary files /dev/null and b/Methodology Library/GHG Methodology/US Landfill/U.S. Landfill Protocol Import Ready.xlsx differ diff --git a/Methodology Library/GHG Methodology/US Landfill/U.S. Landfill Protocol.policy b/Methodology Library/GHG Methodology/US Landfill/U.S. Landfill Protocol.policy new file mode 100644 index 000000000..a7b5147c6 Binary files /dev/null and b/Methodology Library/GHG Methodology/US Landfill/U.S. 
Landfill Protocol.policy differ diff --git a/Methodology Library/GHG Methodology/US Landfill/Verification Report.txt b/Methodology Library/GHG Methodology/US Landfill/Verification Report.txt new file mode 100644 index 000000000..f81c06d86 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/Verification Report.txt @@ -0,0 +1,95 @@ +var _pp = { + "G5": "CAR001", + "G6": "Environmental Waste Management Facility (EWMF) Landfill Gas Project", + "G7": "Environment First", + "G8": "This report is provided to the Project Developer as a deliverable of the Climate Action Reserve (CAR) project verification process. This report covers the verification of the Landfill Gas Destruction Project – CAR001 (the Project) for the periods of January 1, 2021 through December 31, 2021. During the verification process, the Project advisor acted as the Project Developer's representative. Both the Project Developer and advisor are the responsible parties for the GHG statement under verification. Environment First is responsible for expressing the opinion below on the GHG statement based on the outcome of the verification process.", + "G9": "The purpose of this verification was, through review of appropriate evidence, to establish that: • the Project conforms to the requirements of the verification criteria, including all eligibility requirements, discussed in Section 4 of this report; and • the data reported are accurate, complete, consistent, transparent, and free of material error or omission. 
Verification objectives also included reaching a conclusion about the accuracy of the GHG statement and the conformity of the statement with criteria.", + "G11": "New York", + "G12": "Emissions reductions (expressed in units of Carbon Dioxide equivalents (CO2e)) resulting from methane destruction; Project emissions of CO2 from fossil fuel combustion and purchased electricity.", + "G13": [ + { + "G5": "2023-06-01", + "G6": "2024-05-31" + } + ], + "G16": "Metered Data and Emissions Reduction Calculations", + "G18": "• Climate Action Reserve U.S. Landfill Protocol, Version 6.0, June 8, 2022 (CAR Protocol) • Reserve Offset Program Manual (April 2024, v9.2) • U.S. Landfill Project Protocol Version 6.0 Errata and Clarifications, April 13, 2023 • Applicable Climate Action Reserve Policy Memos", + "G19": "• Section 8 of the CAR Protocol, Version 6.0 • Climate Action Reserve Verification Program Manual, February 3, 2021 • ISO 14064-3: Specification with guidance for the verification and validation of greenhouse gas statements, 2019", + "G20": "Reasonable Level of Assurance", + "G21": "• Misstatements greater than one percent of the Project’s emission reductions assertion are considered material. • Qualitative non-conformities with the CAR Protocol are also considered material.", + "G23": "The verification process was utilized to gain an understanding of the Project’s emission sources and reductions and to evaluate and verify the collection and handling of data, the calculations that lead to the results, and the means for reporting the associated data and results. 
The following verification process was used: • conflict of interest review; • selection of Audit Team; • verification preparations; • initial interaction and kickoff meeting with DANC and B&L; • development of the verification and evidence gathering plan; • execution; • site visit; • assessment of raw data and calculations for period under review; • follow-up interaction with the project developer and B&L for corrective action, clarification, or supplemental data as needed; • Independent review; and • final opinion and report submittal.", + "G24": "Prior to beginning any verification project, First Environment conducts an evaluation to identify any potential conflicts of interest associated with the Project. No potential conflicts were found for this Project. First Environment also received authorization to proceed with verification activities for the Project developer from CAR in a notification dated August 30, 2022.", + "G25": "Environment First’s Verification Team consisted of the following individuals, who were selected based on their verification experience and familiarity with landfill operations: • Lead Verifier – John Smith • Verifier – Alex Johnson • Senior Internal Reviewer – Sam Green", + "G26": "Environment First performed pre-engagement activities to confirm the type of engagement, level of assurance, materiality thresholds, the objectives, criteria, and scope for verification activities. Environment First also performed a strategic analysis to understand the activities and complexity of the project and to determine the nature and extent of the verification activities required. The strategic analysis was also used to develop sufficient understanding of the GHG-related activity and its relevant sector information to plan and conduct the verification.", + "G27": "The verification was initiated with a kick-off meeting on September 8, 2022. 
The meeting focused on confirming the scope, schedule, and data required for verification.", + "G28": "The Audit Team formally documented its verification plan and determined the evidence-gathering plan. The verification plan was developed based on the discussion of key elements of the project verification process during the kick-off meeting. The project developer was afforded the opportunity to comment on the key elements of the verification plan. Based on items discussed and agreed upon with the project developer, the plan identified the Environment First project team members, project level of assurance, materiality threshold, and standards of evaluation and reporting for the verification. It also provided an outline of the verification process, established project deliverables, and presented a data-evidence gathering plan designed to review all project elements in areas of potentially high risk of inaccuracy or non-conformance.", + "G29": "The Audit Team executed the evidence-gathering activities. The verification was conducted according to the verification plan and the evidence-gathering activities according to the evidence-gathering plan.", + "G30": "Mr. John Smith conducted a site visit on October 13, 2022 to assess the Project’s data management systems and interview personnel relevant to the Project as part of the verification process.", + "G31": "This assessment used information and insights gained during the previous steps to evaluate the collected data and the reported emissions reduction quantities and identify if either contained material or immaterial misstatements.", + "G32": "The team issued corrective action and clarification requests during the verification process. The Project Developer provided sufficient responses to all of the corrective action and clarification requests. 
", + "G33": "Before verification reporting is released to the client, a Environment First lead verifier, who has not participated in the verification activities, conducts an independent review to confirm that all verification activities have been completed and provide the agreed upon level of assurance.", + "G34": "Verification reporting, represented by this report, documents the verification process and identifies its findings and results. Verification reporting consists of this report, a verification opinion, and a list of findings, all to be submitted to the Climate Action Reserve.", + "G36": "The Authority operates the Environmental Waste Management Facility (EWMF) located in Rodman, New York. The landfill opened in 1992 and currently receives municipal solid waste. The Project involves the voluntary collection and destruction of landfill gas. Landfill gas is destroyed in the EWMF’s open flare or at the energy facility operated by EcoEnergy Solutions. The energy facility destroys landfill gas in multiple engine gensets, a thermal oxidizer (enclosed flare), and an open flare. Emission reduction credits are not being claimed for gas destroyed by the EWMF’s open flare and the energy facility’s open flare. The Project destroys landfill gas that would otherwise be released into the atmosphere, resulting in a net reduction of CO2-equivalents. No combustion devices were present on-site prior to the Project activity. The baseline scenario is defined as the total release of landfill gas into the atmosphere.", + "G37": "The Project meets the eligibility requirements set forth in the CAR Protocol as described below.", + "G38": "On March 1, 2008, a landfill gas-to-energy development and gas assignment agreement was entered into between Renewable Energy Solutions, Inc. (now EcoEnergy Solutions), and the Authority for a term of twenty years. 
The agreement specifies that methane destruction and the associated carbon credits are excluded and remain the sole title of the Authority. The agreement also states that it shall be binding and benefit the parties and their respective successors (i.e., EcoEnergy Solutions). Therefore, Environment First concluded that the Authority is correctly identified as the Project Developer and retains title to the associated carbon credits generated by the Project activity. Additionally, Environment First relied on the Attestation of Title completed by the Authority for the current reporting period. The Attestation is on file with CAR and was reviewed to confirm it was completed correctly.", + "G39": "Environment First confirmed that the Project meets CAR’s start date requirements. The Project start date is March 28, 2001, which was previously confirmed by Environment First during the Project’s initial reporting period and was re-confirmed during the current verification. The start date was confirmed through review of flare start-up and commissioning records. The Project is currently within its third crediting period, which spans from March 28, 2021, through March 27, 2031.", + "G40": "The Project passes both the Performance Standard Test and the Legal Requirements Test, as described below.", + "G41": "The Project consists of the installation of a landfill gas collection and control system and therefore exceeds the performance standard defined by the CAR protocol, specifically Scenario 1 from the list provided in Section 3.4.1 of the CAR Protocol, because no collection or control system existed prior to the Project start date. The baseline scenario was confirmed through site visit interviews with Project personnel. 
It was also confirmed during the site visit that the landfill is not a bioreactor.", + "G42": "The project is not claiming any other credits or incentives for methane destruction activities and is not a participant in the Regional Greenhouse Gas Initiative (RGGI) program. The project activity generates Renewable Energy Certificates (RECs). However, carbon dioxide emission reductions associated with displacement or offsetting of fossil generated electricity are excluded from the GHG assessment boundaries of the Protocol, and therefore there is no stacking of credits relevant to REC generation.", + "G43": "The landfill has a permitted capacity above the 2.5 million cubic meter threshold of municipal solid waste that triggers New Source Performance Standard (NSPS) requirements. As such, the most recent non-methane organic compounds (NMOC) emissions rate testing occurred on August 3, 2021. The landfill was modified by a permitted expansion after July 17, 2014; therefore, the most recent emissions rate report dated January 30, 2023, was prepared in accordance with 40CFR63, Subpart XXX. The report indicates that the NMOC emission rate is estimated to be 11.55 Megagrams (Mg) per year in 2021, and 11.73 Mg per year in 2022, both below the 34 Mg per year threshold established by the NSPS regulation. Environment First reviewed the facility’s solid waste operating permit and Title V permits to confirm that no other requirements for an active landfill gas collection and control system existed. This review of permits, laws, and regulations indicated the voluntary nature of the Project. Additionally, Environment First relied on the Attestation of Voluntary Implementation completed by the Authority for the current reporting period. 
The Attestation is on file with CAR and was reviewed to confirm it was completed correctly.", + "G44": "Stormwater Permit and the energy plant’s Title V permit compliance reports were generated on October 4, 2023, from the Environmental Protection Agency’s (EPA) Enforcement and Compliance History Online (ECHO) database. The ECHO database searches did not identify any issues during the current reporting period for either the landfill or the energy facility. Additionally, Environment First relied on the Attestation of Regulatory Compliance completed by the Authority for the current reporting period. The Attestation is on file with CAR and was reviewed to confirm it was completed correctly.", + "G45": "The Project was implemented in conformity with the CAR Protocol. The Authority developed and implemented a Monitoring Plan to track relevant Project parameters and data sources. The Monitoring Plan, including the Project Diagram, was reviewed to determine compliance with the protocol requirements. Based on observations made during the site visit and review of relevant Project documentation, Environment First found the Monitoring Plan and Project Diagram to meet the requirements set forth by the CAR Protocol and the Project to be implemented in accordance with the Monitoring Plan, except where noted below.", + "G46": "Landfill gas flow to each destruction device is continuously monitored with flow meters. All flow data is transmitted to a SCADA system data logger that electronically archives the data. Flow data are automatically corrected from ambient conditions to a standard temperature and pressure set in each flow meter's operating parameters. The flow data is exported to Microsoft Excel for review and quantification. See Table 1 below for details of each flow meter. 
The methane concentration of the landfill gas is continuously measured with a Siemens Ultramat 23 continuous gas analyzer and transmitted to a SCADA system data logger that electronically archives all methane concentration data for export to Microsoft Excel for review and quantification. Both gas flow rates and methane concentration are measured on the same relative moisture basis. Flare temperature is continuously monitored by thermocouples and recorded by the SCADA data logger. Engine operability (kWh production) is monitored and recorded hourly. The data are also exported to Microsoft Excel for review and quantification. Table 1 summarizes the Project monitoring system parameters and monitoring equipment employed by the Project. As discussed in section 6.1 above, and because emission reduction credits are not being claimed for gas destroyed by the EWMF’s open flare and the energy facility's open flare, the flow meters associated with these two flares and the portable gas analyzers used at the EWMF’s flare skid are not identified in the table below.", + "G47": [ + { + "G5": "Landfill gas flow", + "G6": "FCI ST-98 Flow Meter (s/n 418655)", + "G7": "Thermal Oxidizer (Enclosed Flare)", + "G8": "60°F / 1 Atm", + "G9": "Continuous", + "G10": "1 minute", + "G11": "SCADA data logger" + } + ], + "G55": "EWMF’s instrument quality assurance/quality control (QA/QC) plan for the Project’s monitoring equipment complies with CAR Protocol’s requirements, except where noted below: The Project utilizes a total of four meters for the thermal oxidizer and engines (two meters for each destruction device) to facilitate routine calibrations and to ensure there is always a spare meter available for use. Flow meter cleanings/inspections and third-party field calibration accuracy checks within two months of the end of both reporting periods were not documented for all flow meters. As a result, EWMF requested a variance from the Climate Action Reserve to address these issues. 
The manufacturer of the FCI-ST98 flow meters recommends calibration every eighteen months. All flow meters were used within their recommended calibration frequency, with the exception of engine meter s/n 259577, which was in use past its recommended calibration frequency. As a result, EWMF requested a variance from the Climate Action Reserve to address this issue. All flow meters were calibrated to the range of conditions expected at the site, as confirmed through a review of manufacturer calibration certificates.", + "G56": [ + { + "G5": "FCI ST-98 Flow Meter (s/n 418655)", + "G6": "Thermal Oxidizer (Enclosed Flare)", + "G7": { + "G5": "2023-07-01", + "G6": "2024-08-17" + }, + "G10": "See Variance Section", + "G11": "See Variance Section", + "G12": "5/10/2021 & 6/13/2023" + } + ], + "G65": "Project emissions sources and the associated monitoring methodology are summarized below.", + "G66": [ + { + "G5": "Consumption of Purchased Electricity ", + "G6": "Utility Invoices" + } + ], + "G69": "The emission reduction calculations were reviewed to ensure accuracy in the formulas used and the raw data employed as inputs. The formulas were tested for consistency with the calculation methodology described in the CAR Protocol. Total landfill gas flow is computed by multiplying the average landfill gas flow rate in a given interval by the length of that interval. Flow totals are aggregated on a daily basis. Metered gas flow volumes are recorded at standard conditions. All recorded landfill gas flow was adjusted as needed in the emission reduction calculations to standard conditions of one atmosphere of pressure and a temperature of 60°F. The total volume of methane destroyed by the Project was computed in daily intervals by multiplying the daily gas flow to the destruction devices by the daily average of methane concentration measurements and converting it to a mass flow using the density of methane at 60°F and one atmosphere of pressure. 
The total quantity of methane destroyed by the Project is summed over the reporting period to obtain the total baseline emissions. Because one flow meter monitors flow to all four engines, it was confirmed through the review of raw flow data that during any intervals when one or more engines were not operational, the remaining engines had the available capacity to destroy all of the available landfill gas, and that engine output corresponded to the flow of the gas. Additionally, it was confirmed during the site visit that the engines are designed so that it is physically impossible for gas to pass through while the engine(s) are non-operational; that each engine is equipped with an automatic safety shut-off valve; and that each engine has identical destruction efficiency.", + "G70": [ + { + "G5": "LFGi,t", + "G6": "Total quantity of landfill gas sent to the destruction devices ", + "G7": "Summed daily from metered data; Engine flow is corrected to reference conditions of 60ºF and one atmosphere of pressure in calculations. Thermal Oxidizer flow is metered at reference conditions of 60ºF " + } + ], + "G74": "As discussed above, EWMF requested a variance from the CAR Protocol for the current reporting periods to address the following issues: • Missing cleaning/inspections of the flow meters and continuous gas analyzer, • Missing field accuracy checks of the flow meters within two months of the end of each reporting period, and • Use of engine meter s/n 259577 past its recommended calibration frequency. The variance request was approved by CAR on May 23, 2024, for the period of January 1, 2021, through December 31, 2022. The verifier confirms that the 2021 and 2022 calibrations for all four flow meters, as indicated above, that occurred prior to installation had ‘as left’ conditions within the ±5% accuracy threshold. 
Calibration Confirmation: Environment First confirmed that all of the meters and calibration dates identified in the Variance Determination had \"as left\" drift conditions that were within the 5% accuracy threshold. The verifier confirms that the following calibrations have “as found” conditions within the ±5% accuracy threshold: 6/12/2023 (S/N 502730): as found = -2.01% 11/13/2023 (S/N 503587): as found = -2.13% In the absence of the “as-found” readings for the 3/15/2022 calibration of flow meter S/N 259577, EWMF shall apply a discount to data from 9/17/2020-10/28/2021 equal to the maximum drift from the “as found” conditions available from the calibrations for the flow meter. A discount has been applied for the time period this meter was used prior to the March 15, 2022 calibration. The discount is applied from the beginning of the reporting period through the date it was removed from service (7/1/2021 - 10/28/2021). The discount applied is 1.66%, based on the maximum drift from the \"as found\" conditions from the November 13, 2023 calibration. The verifier confirms that the hypothetical drift and scaling for flow meter S/N 259577 is applied appropriately. Environment First confirmed that the hypothetical drift was applied correctly. No drift is applied for data prior to the 6/14/2023 calibration for flow meter S/N 418655 due to underreporting flow. Environment First confirmed that no drift adjustment was applied for data prior to the June 14, 2023 calibration of the thermal oxidizer meter S/N 418655. The verifier confirms that the monitoring plan specifies the appropriate cleaning and inspection and field check schedules to ensure that future reporting periods will meet the protocol QA/QC requirements. Environment First confirmed that Table 3 of the Monitoring Plan contains sufficient detail and description of the type and frequency of meter maintenance activities consistent with Protocol requirements. 
The verifier confirms that all other QA/QC requirements have been met. Environment First confirmed that all other conditions have been met.", + "G75": "Based on the historical evidence collected and the assessments performed, Environment First concludes that the Project’s GHG emissions reductions achieved through the collection and combustion of landfill gas for the periods of June 01, 2023 through May 31, 2024 can be considered with a reasonable level of assurance: • in conformance with the verification criteria, and • without material discrepancy.", + "G76": { + "G5": { + "G5": "2023-06-01", + "G6": "2024-05-31" + }, + "G8": 1080, + "G9": 200, + "G10": 880 + } +} +Object.values(__request).forEach(r=>r.preset(_pp)); \ No newline at end of file diff --git a/Methodology Library/GHG Methodology/US Landfill/readme.md b/Methodology Library/GHG Methodology/US Landfill/readme.md new file mode 100644 index 000000000..578c717f5 --- /dev/null +++ b/Methodology Library/GHG Methodology/US Landfill/readme.md @@ -0,0 +1,241 @@ +## Climate Action Reserve’s U.S. Landfill Protocol Version 6.0 + +## Table of Contents + + +- Introduction +- Need and Use +- Monitoring and Quantification Approach +- Project Eligibility and Additionality +- Project Type +- Demo Video +- Policy Workflow +- Policy Import +- Available Roles +- Important Schemas +- Token (Climate Reserve Tonnes - CRTs) +- Step-by-Step + + + +## Introduction + +The U.S. Landfill Protocol, developed by the Climate Action Reserve, is a standardized framework for quantifying, reporting, and verifying greenhouse gas (GHG) emission reductions from methane capture and destruction projects at U.S. landfills. Approved by the Integrity Council for the Voluntary Carbon Market (ICVCM), this protocol ensures that emission reductions meet high standards of environmental integrity, credibility, and transparency. 
This ICVCM approval reinforces the protocol’s alignment with global best practices and its credibility in voluntary carbon markets. + +## Need and Use + +The U.S. Landfill Protocol supports project developers in creating projects that effectively reduce emissions from landfill sites. It includes a framework for calculating and verifying emission reductions generated by capturing methane, a potent greenhouse gas, from landfills and using or destroying it. This protocol helps landfill operators and project developers earn credits that can be used to offset emissions in compliance and voluntary carbon markets. + +## Monitoring and Quantification Approach + +The U.S. Landfill Protocol specifies detailed requirements for calculating baseline and project emissions to assess net GHG emission reductions. Key components include: + +**Methane Capture and Destruction:** Continuous monitoring of landfill gas (LFG) collection and destruction is required, using calibrated equipment to measure LFG flow and methane concentration, ensuring accurate quantification of emissions destroyed. + +**Baseline Emissions:** Baseline emissions are calculated based on the methane emissions that would have occurred without the project, minus any methane oxidized by soil bacteria or destroyed by any existing destruction devices. These calculations use specific discount factors for oxidation and other adjustments according to landfill characteristics and monitoring frequency. + +**Project Emissions:** Project emissions include any emissions from the energy used to operate the landfill gas collection and destruction systems, as well as emissions from fossil fuels or supplemental energy sources. + +**Emission Reductions:** Net emission reductions are calculated by subtracting project emissions from baseline emissions, providing a clear assessment of GHG reductions achieved through methane destruction. 
+ +This structured approach allows for consistent, transparent, and conservative reporting of GHG emission reductions in line with the Climate Action Reserve's requirements. + +## Project Eligibility and Additionality + +Projects must meet specific eligibility requirements to qualify under this protocol: + +- **Location:** Projects must be located at landfills in the U.S., including tribal lands and territories. + +- **Project Start Date:** The project start date should be defined by the first instance of landfill gas destruction, with project submission required within 12 months of this date. + +- Additionality is demonstrated through the performance standard test (based on baseline conditions) and the legal requirement test (ensuring no regulatory mandates already require methane destruction). Only landfill projects that collect and destroy methane beyond regulatory requirements qualify for additionality, ensuring that GHG reductions are surplus to business-as-usual scenarios. + +## Project Type + +This protocol applies to projects that aim to reduce methane emissions from U.S. landfills by capturing and utilizing or destroying methane gas. Only projects that meet the Climate Action Reserve’s eligibility criteria and have not issued credits under other GHG programs are eligible. + +## Demo Video + +[Youtube](https://www.youtube.com/watch?v=fDTobFguBJE) + +## Policy Workflow + +The policy workflow replicates the procedural steps required for landfill gas projects, covering project registration, additionality demonstration, and the submission of monitoring reports to claim carbon credits. + +![image](https://github.com/user-attachments/assets/dbdb7733-e474-45d4-b15c-35413a1a0980) + +## Policy Import + +This policy is available for import via GitHub or IPFS timestamp. + +IPFS Timestamp: 1732046103.613084486 + +## Available Roles + +**Project Developer:** Responsible for overall project management, documentation submission, and coordination with verification bodies. 
+ +**Verifier(s):** Independent third-party verifiers who assess project data and ensure compliance with protocol requirements. + +**Standard Registry:** The governing body that maintains project records, manages reporting, and approves credit issuance. + +## Important Schemas + +**Project Submission Form:** This form provides a comprehensive overview of the project, including landfill site details, baseline emissions, monitoring plans, and stakeholder consultations. It serves as the foundational document for project registration with the Climate Action Reserve. + +**Verification Report:** Submitted by an independent third-party verifier, this report evaluates the accuracy of the emissions reductions claimed by the project. It includes findings on project compliance, monitoring data, and any identified discrepancies or recommendations for improvement. + +**Emission Reductions Report:** This document provides a summary of the emission reductions accomplished by the project. It details the methodologies used, calculations used, and calculation results. + +**Attestation of Title Form:** Signed by the Project Developer, this form certifies ownership of the project’s GHG reductions. Clear ownership documentation ensures that CRTs are issued only to entities with verified rights to claim these reductions. + +**Attestation of Voluntary Implementation:** This attestation is signed by the Project Developer to confirm that the project’s landfill gas collection and destruction activities go beyond any regulatory requirements. It is submitted prior to verification, ensuring that GHG reductions are genuinely additional to what would have occurred under existing legal mandates. + +**Attestation of Regulatory Compliance:** This attestation confirms that the project complies with all applicable federal, state, and local laws and regulations (e.g., environmental, safety). 
The Project Developer must disclose any instances of non-compliance to the verifier, who assesses the impact on credit issuance. + +**Environmental Safeguards:** This schema ensures that project activities align with environmental best practices, minimizing unintended impacts on local ecosystems, air, water, and soil quality. The Project Developer must document steps taken to prevent adverse environmental effects, such as controlling potential pollutants, managing resource use, and protecting nearby habitats. This schema may also include periodic environmental impact assessments as part of ongoing compliance and verification. + +## Token (Climate Reserve Tonnes - CRTs) + +Each CRT represents one metric tonne of carbon dioxide equivalent (tCO₂e) reduced or avoided through the approved project activities. + +### Step By Step + +1. Import the policy using IPFS or Policy File. Once imported, you will be redirected to the policy configurator. + +![image](https://github.com/user-attachments/assets/8c5ab2a7-dc64-481d-9664-e4908c0d3d8c) + +![image](https://github.com/user-attachments/assets/636333c0-b93e-4cc4-aaed-5bb2c51201d9) + +![image](https://github.com/user-attachments/assets/6b0f9fba-5e35-423f-b671-c6a83b3850bc) + +2. Set the policy to Dry Run or Publish it using the dropdown. Then select “Go” or “Register”. + +![image](https://github.com/user-attachments/assets/8dc8d7fa-46a4-4326-bd6e-60d67ec027d4) + +![image](https://github.com/user-attachments/assets/2169c25d-4211-4fdc-a660-6041fdf22787) + +3. Create a new user account and assign their role as the Project Developer. + +![image](https://github.com/user-attachments/assets/2e4ae3d1-bb43-4d48-934d-bbeefd953e81) + +![image](https://github.com/user-attachments/assets/8a9b790e-8e62-4640-9db0-2730a1dd21f5) + +![image](https://github.com/user-attachments/assets/853b683d-359d-4499-a997-dc1461db418a) + +![image](https://github.com/user-attachments/assets/05c26582-aeb2-4910-baca-a932eb85e5a5) + +4. 
Log in to the administrator account to review and approve the Project Developer account. + +![image](https://github.com/user-attachments/assets/4d63751b-67e3-4d70-b528-f38604673bfd) + +5. The Project Developer can click 'Create Project' to submit the Project Submission Form. + +![image](https://github.com/user-attachments/assets/2c92b4ad-490e-487c-983a-a90e4182654c) + +![image](https://github.com/user-attachments/assets/7c7f9dd4-a90e-4907-a53d-f446b467083e) + +6. The Project Developer can also submit a Project Diagram. + +![image](https://github.com/user-attachments/assets/02f83792-764d-437a-a849-f023a2565bdc) + +![image](https://github.com/user-attachments/assets/e61af0df-2468-4a1d-9b1a-ffbef4595753) + +7. Return to the administrator account to review the Project Submission Form and the Project Diagram for completeness and approve both submissions. + +![image](https://github.com/user-attachments/assets/8a70923d-23cd-46cc-816d-247e420c2da4) + +![image](https://github.com/user-attachments/assets/edbef16e-64c4-4fe7-9a87-3fa3152b16e6) + +8. Now, we will create a new user to add a verifier account. + +![image](https://github.com/user-attachments/assets/7d31c1cb-8ab7-4fc6-a568-814ff46b1062) + +![image](https://github.com/user-attachments/assets/a69046a5-90de-49ff-94d3-9fcd0e126cbe) + +![image](https://github.com/user-attachments/assets/7528cfcf-8e77-40d8-9f91-0596dbb8b291) + +![image](https://github.com/user-attachments/assets/28511d2f-5c52-4cb9-992c-ae205d757d8b) + +9. Log in to the Administrator account to review and approve the Verifier account. + +![image](https://github.com/user-attachments/assets/a9d6c8d5-3e36-4f18-a0b4-7d40d50d4ae3) + +10. The Verifier must complete a NOVA/COI form and submit it to the Administrator. + +![image](https://github.com/user-attachments/assets/4e95a8c8-1d24-4f3a-a087-321a1c9325d2) + +11. Log in to the Administrator account and approve the NOVA/COI form. 
This policy also allows the Administrator to identify a conflict of interest (COI) as outlined in the workflow above. + +![image](https://github.com/user-attachments/assets/e9af5f27-9fa0-430f-ae3f-d241a22d7268) + +12. Once the NOVA/COI form has been approved with no COIs identified you can log in to the Project Developer account and assign a Verifier to the project. This will allow the verifier to see the project submission form. + +![image](https://github.com/user-attachments/assets/bd5d59ab-72e4-4ca2-8476-119ae4ae739c) + +![image](https://github.com/user-attachments/assets/1e3f4919-2997-4f83-9bf3-4bca973cc42a) + +![image](https://github.com/user-attachments/assets/d02fc0ae-3e0c-4229-b8d9-4efc5bf84be1) + +13. The Project Developer can submit all the forms outlined in the project documents tab, assigning the third-party verifier to each form. + +![image](https://github.com/user-attachments/assets/9efe0f93-49af-49d8-ab4c-fe9f11f23230) + +![image](https://github.com/user-attachments/assets/24fec958-78d0-498a-9cac-4e6e548ae13c) + +14. Using the assigned Verifier account, they can now review and approve all the project documents. Once approved they can complete and submit the Verification Report. + +![image](https://github.com/user-attachments/assets/e8bdfa99-286f-43a5-8a3d-a4cec9fea895) + +![image](https://github.com/user-attachments/assets/dfbb9b30-c859-4fc7-ac42-6a1efa057014) + +![image](https://github.com/user-attachments/assets/afcca1de-bc44-4e37-a24e-4718a86e0b94) + +15. The Administrator must review the report for completeness and choose to approve or reject. + +![image](https://github.com/user-attachments/assets/fb49e085-837a-4bba-a176-c917ae0b3de1) + +16. The final step before issuing credits would be to approve the Emission Reductions from the Administrators account. + +![image](https://github.com/user-attachments/assets/993e0b2d-1550-46a8-bab5-99eeb1f064bb) + +17. 
The project owner and administrator will have access to the Verifiable Presentation (VP) and Trust Chain. + +![image](https://github.com/user-attachments/assets/99fcdceb-ae8f-46e7-9074-7326e5c875e5) + +![image](https://github.com/user-attachments/assets/83eb74d0-1d99-4599-941e-b6399113b646) + +![image](https://github.com/user-attachments/assets/aca752ff-aaae-4d12-8b47-a37d3708a8ff) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Methodology Library/Gold Standard/Gold Standard TPDDTEC v3.1 (contributed by Global Carbon Registry)/policies/GS-TPDDTEC(v3.1.0)/README.md b/Methodology Library/Gold Standard/Gold Standard TPDDTEC v3.1 (contributed by Global Carbon Registry)/policies/GS-TPDDTEC(v3.1.0)/README.md index 20736020d..06603afc7 100644 --- a/Methodology Library/Gold Standard/Gold Standard TPDDTEC v3.1 (contributed by Global Carbon Registry)/policies/GS-TPDDTEC(v3.1.0)/README.md +++ b/Methodology Library/Gold Standard/Gold Standard TPDDTEC v3.1 (contributed by Global Carbon Registry)/policies/GS-TPDDTEC(v3.1.0)/README.md @@ -39,7 +39,7 @@ Global Climate Registry (GCR) has built this Guardian Policy that tokenizes the ## Policy Guide -This policy is published to Hedera network and can either be imported via Github(.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github(.policy file) or IPFS timestamp. 
Hedera Topic (testnet) - [0.0.4234489](https://explore.lworks.io/testnet/topics/0.0.4234489) diff --git a/Methodology Library/Gold Standard/Metered Energy Cooking/readme.md b/Methodology Library/Gold Standard/Metered Energy Cooking/readme.md index 4314a9f26..256996f49 100644 --- a/Methodology Library/Gold Standard/Metered Energy Cooking/readme.md +++ b/Methodology Library/Gold Standard/Metered Energy Cooking/readme.md @@ -55,7 +55,7 @@ This approach is more precise than traditional methodologies, which rely on more ## Policy Guide -This policy is published to Hedera network and can either be imported via Github(.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github(.policy file) or IPFS timestamp. Latest Version - 0.0.3 Hedera Topic - [0.0.3972127](https://explore.lworks.io/testnet/topics/0.0.3972127) diff --git a/Methodology Library/GoldStandard/Metered Energy Cooking/readme.md b/Methodology Library/GoldStandard/Metered Energy Cooking/readme.md index 9a9abf087..b238416d5 100644 --- a/Methodology Library/GoldStandard/Metered Energy Cooking/readme.md +++ b/Methodology Library/GoldStandard/Metered Energy Cooking/readme.md @@ -49,7 +49,7 @@ This approach is more precise than traditional methodologies, which rely on more ## Policy Guide -This policy is published to Hedera network and can either be imported via Github(.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github(.policy file) or IPFS timestamp. 
image diff --git a/Methodology Library/Hackathon/UNFCCC_ACM0007/readme.md b/Methodology Library/Hackathon/UNFCCC_ACM0007/readme.md index b44789d83..e3bdaee79 100644 --- a/Methodology Library/Hackathon/UNFCCC_ACM0007/readme.md +++ b/Methodology Library/Hackathon/UNFCCC_ACM0007/readme.md @@ -26,7 +26,7 @@ Entities operating single cycle power plants can use ACM0007 to develop carbon o **Verification and Validation Body (VVB)** - The VVB plays a critical role in independently verifying and validating the project data submitted by the project participant. They thoroughly assess the project's emission reduction potential, methodologies, and adherence to the policy guidelines. Based on their evaluation, the VVB either approves or rejects the project for registration. \ **Registry (UNFCCC)** - The United Nations Framework Convention on Climate Change (UNFCCC) serves as the registry for the CDM. They oversee the multiple workflow steps involved in the project's approval, including the verification and validation process by the VVB and the endorsement by the DNA. The UNFCCC's approval is necessary for the project's successful registration and issuance of CER tokens. ## Technical Aspects of the Project (Policy Guide): -This policy can be imported to hedera guardian via the Github file(.policy) or IPSF timestamp(1706881469.628524368). \ +This policy can be imported to hedera guardian via the Github file(.policy) or IPFS timestamp(1706881469.628524368). 
\ **Policy Workflow**: ![image](https://github.com/ankurgupta007/guardian/assets/98680735/ae68213a-d1a3-4b78-848b-1b69b0e215ec) diff --git a/Methodology Library/Verra/VM0044/readme.md b/Methodology Library/Verra/VM0044/readme.md index 863e82729..ec927d890 100644 --- a/Methodology Library/Verra/VM0044/readme.md +++ b/Methodology Library/Verra/VM0044/readme.md @@ -39,7 +39,7 @@ Within this digital methodology, users will use a collection of Clean Developmen ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. Policy: 1698754217.516521003 diff --git a/Methodology Library/Verra/VMR0006/README.md b/Methodology Library/Verra/VMR0006/README.md index a79abcbdf..3935ff379 100644 --- a/Methodology Library/Verra/VMR0006/README.md +++ b/Methodology Library/Verra/VMR0006/README.md @@ -55,7 +55,7 @@ Various methodologies are used to quantify emissions reductions in cookstove pro ### Policy Import -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ### Available roles diff --git a/Methodology Library/Verra/Work In Progress/VM0033/readme.md b/Methodology Library/Verra/Work In Progress/VM0033/readme.md index d66683fe6..55f81b253 100644 --- a/Methodology Library/Verra/Work In Progress/VM0033/readme.md +++ b/Methodology Library/Verra/Work In Progress/VM0033/readme.md @@ -59,12 +59,13 @@ VM0033 on Guardian platform is set to become a powerful tool for scaling up tida ## Policy Workflow +Coming Soon ## Policy Guide First version of this policy is published to the Hedera network and can be imported via Github (.policy file) or IPFS timestamp. 
-IPSF Timestamp of 0.0.1 - 1728312926.986578213 +IPFS Timestamp of 0.0.1 - 1728312926.986578213 ### Available Roles @@ -110,4 +111,4 @@ Verified Carbon Units (VCUs), each equivalent to one tonne of CO2 equivalent emi 10. The issued VCUs are recorded on the Hedera ledger, ensuring transparency and traceability. -[TODO: Detailed steps with screenshots to be added] \ No newline at end of file +[TODO: Detailed steps with screenshots to be added] diff --git a/Methodology Library/readme.md b/Methodology Library/readme.md index e30ca0ca7..772cde4f3 100644 --- a/Methodology Library/readme.md +++ b/Methodology Library/readme.md @@ -63,9 +63,10 @@ This folder contains sub folders for each Methodology and their authors (with ma 3. GHG Scope II - Atma 4. GHG Scope 3 - Atma 5. GHGP Corporate Standard - Envision -6. GHGP Product Standard - Avery Dennison (Coming soon) -7. E-Mission (Coming soon) -8. EPA CFR 40 p75 - Serapis (Coming soon) +6. Climate Action Reserve’s U.S. Landfill Protocol Version 6.0 - Envision +7. GHGP Product Standard - Avery Dennison (Coming soon) +8. E-Mission (Coming soon) +9. 
EPA CFR 40 p75 - Serapis (Coming soon) ### Hackathon Policies (Under Testing) diff --git a/ai-service/.gitignore b/ai-service/.gitignore index e1597c74f..82f0c3ac6 100644 --- a/ai-service/.gitignore +++ b/ai-service/.gitignore @@ -1,2 +1 @@ -/faiss-vector/ /data/ diff --git a/ai-service/faiss-vector/faiss.index b/ai-service/faiss-vector/faiss.index new file mode 100644 index 000000000..e69de29bb diff --git a/api-gateway/src/api/service/policy.ts b/api-gateway/src/api/service/policy.ts index 725d0017e..99e09bee0 100644 --- a/api-gateway/src/api/service/policy.ts +++ b/api-gateway/src/api/service/policy.ts @@ -29,6 +29,7 @@ export class PolicyApi { @Auth( Permissions.POLICIES_POLICY_READ, Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, Permissions.POLICIES_POLICY_AUDIT, // UserRole.STANDARD_REGISTRY, // UserRole.USER, @@ -95,6 +96,7 @@ export class PolicyApi { @Auth( Permissions.POLICIES_POLICY_READ, Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, Permissions.POLICIES_POLICY_AUDIT, // UserRole.STANDARD_REGISTRY, // UserRole.USER, @@ -453,6 +455,7 @@ export class PolicyApi { @Auth( Permissions.POLICIES_POLICY_READ, Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, Permissions.POLICIES_POLICY_AUDIT, // UserRole.STANDARD_REGISTRY, // UserRole.USER, @@ -880,6 +883,7 @@ export class PolicyApi { @Get('/:policyId/navigation') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -925,6 +929,7 @@ export class PolicyApi { @Get('/:policyId/groups') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1319,6 +1324,7 @@ export class PolicyApi { @Post('/:policyId/groups') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1371,6 +1377,7 @@ export class 
PolicyApi { @Get('/:policyId/blocks') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1417,6 +1424,7 @@ export class PolicyApi { @Get('/:policyId/blocks/:uuid') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1473,6 +1481,7 @@ export class PolicyApi { @Post('/:policyId/blocks/:uuid') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1538,6 +1547,7 @@ export class PolicyApi { @Post('/:policyId/tag/:tagName/blocks') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1603,6 +1613,7 @@ export class PolicyApi { @Get('/:policyId/tag/:tagName') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1654,6 +1665,7 @@ export class PolicyApi { @Get('/:policyId/tag/:tagName/blocks') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -1709,6 +1721,7 @@ export class PolicyApi { @Get('/:policyId/blocks/:uuid/parents') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -3057,6 +3070,7 @@ export class PolicyApi { @Get('/:policyId/multiple') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) @@ -3099,6 +3113,7 @@ export class PolicyApi { @Post('/:policyId/multiple/') @Auth( Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, // UserRole.STANDARD_REGISTRY, // UserRole.USER, ) diff --git a/api-gateway/src/api/service/project.ts b/api-gateway/src/api/service/project.ts index 
6ae0b1265..cd743423d 100644 --- a/api-gateway/src/api/service/project.ts +++ b/api-gateway/src/api/service/project.ts @@ -198,38 +198,30 @@ export class ProjectsAPI { throw new HttpException('Invalid parameters', HttpStatus.UNPROCESSABLE_ENTITY); } - const idLvl = 0; - const eventsLvl = 0; - const propLvl = 2; - const childrenLvl = 0; - const user = null; - + const rowDocuments = await guardians.getVcDocuments({ id: ids }); let samePolicy: boolean = true; - const _data = await guardians.getVcDocuments({ id: ids }); - for (let index = 1; index < _data.length; index++) { - if (_data[index - 1].policyId !== _data[index].policyId) { + const policyIds: string[] = []; + for (const id of ids) { + const doc = rowDocuments.find((e) => e.id === id); + if (doc) { + policyIds.push(doc.policyId); + } else { + policyIds.push(null); + } + if (policyIds.length > 1 && policyIds[policyIds.length - 2] !== policyIds[policyIds.length - 1]) { samePolicy = false; - break; } } - const policyIds = _data.map((p: any) => p.policyId); - + const idLvl = 0; + const eventsLvl = 0; + const propLvl = 2; + const childrenLvl = 0; + const user = null; const refLvl = samePolicy ? 'Revert' : 'Merge'; const keyLvl = samePolicy ? 
'Default' : 'Property'; try { - const comparationVpArray = await guardians.compareVPDocuments( - user, - null, - policyIds, - '1', - '2', - '2', - '0', - 0, - 'Direct' - ); const comparationVc = await guardians.compareDocuments( user, null, @@ -241,6 +233,17 @@ export class ProjectsAPI { keyLvl, refLvl ); + const comparationVpArray = await guardians.compareVPDocuments( + user, + null, + policyIds, + '1', + '2', + '2', + '0', + 0, + 'Direct' + ); return { projects: comparationVc, presentations: comparationVpArray diff --git a/api-gateway/src/api/service/schema-rules.ts b/api-gateway/src/api/service/schema-rules.ts index edd5e73d6..f43f25a05 100644 --- a/api-gateway/src/api/service/schema-rules.ts +++ b/api-gateway/src/api/service/schema-rules.ts @@ -1,6 +1,6 @@ import { IAuthUser, PinoLogger } from '@guardian/common'; import { Body, Controller, Delete, Get, HttpCode, HttpException, HttpStatus, Param, Post, Put, Query, Response } from '@nestjs/common'; -import { Permissions } from '@guardian/interfaces'; +import { Permissions, UserPermissions } from '@guardian/interfaces'; import { ApiBody, ApiInternalServerErrorResponse, ApiOkResponse, ApiOperation, ApiTags, ApiQuery, ApiExtraModels, ApiParam } from '@nestjs/swagger'; import { Examples, InternalServerErrorDTO, SchemaRuleDTO, SchemaRuleDataDTO, SchemaRuleRelationshipsDTO, pageHeader } from '#middlewares'; import { Guardians, InternalException, EntityOwner } from '#helpers'; @@ -384,7 +384,7 @@ export class SchemaRulesApi { * Get rules and data */ @Post('/data') - @Auth(Permissions.SCHEMAS_RULE_EXECUTE) + @Auth() @ApiOperation({ summary: '', description: '', @@ -412,9 +412,13 @@ export class SchemaRulesApi { if (!options) { throw new HttpException('Invalid config.', HttpStatus.UNPROCESSABLE_ENTITY); } - const owner = new EntityOwner(user); - const guardian = new Guardians(); - return await guardian.getSchemaRuleData(options, owner); + if (!UserPermissions.has(user, Permissions.SCHEMAS_RULE_EXECUTE)) { + return null; + 
} else { + const owner = new EntityOwner(user); + const guardian = new Guardians(); + return await guardian.getSchemaRuleData(options, owner); + } } catch (error) { await InternalException(error, this.logger); } diff --git a/api-gateway/src/api/service/schema.ts b/api-gateway/src/api/service/schema.ts index 8e51ecba2..d4d7bb557 100644 --- a/api-gateway/src/api/service/schema.ts +++ b/api-gateway/src/api/service/schema.ts @@ -1720,7 +1720,7 @@ export class SchemaApi { */ @Get('/:schemaId/export/message') @Auth( - Permissions.SCHEMAS_SCHEMA_CREATE, + Permissions.SCHEMAS_SCHEMA_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ @@ -1774,7 +1774,7 @@ export class SchemaApi { */ @Get('/:schemaId/export/file') @Auth( - Permissions.SCHEMAS_SCHEMA_CREATE, + Permissions.SCHEMAS_SCHEMA_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ @@ -2266,7 +2266,7 @@ export class SchemaApi { */ @Get('/:schemaId/export/xlsx') @Auth( - Permissions.SCHEMAS_SCHEMA_CREATE, + Permissions.SCHEMAS_SCHEMA_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ @@ -2489,7 +2489,7 @@ export class SchemaApi { */ @Get('/export/template') @Auth( - Permissions.SCHEMAS_SCHEMA_CREATE, + Permissions.SCHEMAS_SCHEMA_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ diff --git a/api-gateway/src/api/service/tool.ts b/api-gateway/src/api/service/tool.ts index 5919093ac..b6c1fa718 100644 --- a/api-gateway/src/api/service/tool.ts +++ b/api-gateway/src/api/service/tool.ts @@ -550,7 +550,7 @@ export class ToolsApi { */ @Get('/:id/export/file') @Auth( - Permissions.TOOLS_TOOL_CREATE, + Permissions.TOOLS_TOOL_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ @@ -597,7 +597,7 @@ export class ToolsApi { */ @Get('/:id/export/message') @Auth( - Permissions.TOOLS_TOOL_CREATE, + Permissions.TOOLS_TOOL_READ, // UserRole.STANDARD_REGISTRY, ) @ApiOperation({ diff --git a/common/src/import-export/policy-statistic.ts b/common/src/import-export/policy-statistic.ts index 09ea98a90..4c179f604 100644 --- 
a/common/src/import-export/policy-statistic.ts +++ b/common/src/import-export/policy-statistic.ts @@ -1,6 +1,8 @@ import JSZip from 'jszip'; -import { PolicyStatistic } from '../entity/index.js'; -import { IFormulaData, IRuleData, IScoreData, IScoreOption, IStatisticConfig, IVariableData } from '@guardian/interfaces'; +import { Policy, PolicyStatistic, Schema as SchemaCollection } from '../entity/index.js'; +import { IFormulaData, IRuleData, IScoreData, IScoreOption, IStatisticConfig, IVariableData, Schema, SchemaEntity, SchemaStatus } from '@guardian/interfaces'; +import { DatabaseServer } from '../database-modules/index.js'; +import { PolicyImportExport } from './policy.js'; /** * PolicyStatistic components @@ -77,6 +79,71 @@ export class PolicyStatisticImportExport { return { definition }; } + /** + * Load policy schemas + * @param policy policy + * @returns policy schemas + */ + public static async getPolicySchemas(policy: Policy): Promise { + const { schemas, toolSchemas } = await PolicyImportExport.loadAllSchemas(policy); + const systemSchemas = await DatabaseServer.getSchemas({ + topicId: policy.topicId, + entity: { $in: [SchemaEntity.MINT_TOKEN, SchemaEntity.MINT_NFTOKEN] } + }); + + const all = [] + .concat(schemas, toolSchemas, systemSchemas) + .filter((s) => s.status === SchemaStatus.PUBLISHED && s.entity !== 'EVC'); + return all; + } + + /** + * Update schema uuid + * @param schemas policy schemas + * @param data config + * @returns new config + */ + public static updateSchemas(schemas: SchemaCollection[], data?: IStatisticConfig): IStatisticConfig | undefined { + if (!data) { + return; + } + + const fieldMap = new Map(); + const schemaObjects = schemas.map((s) => new Schema(s)); + for (const schema of schemaObjects) { + const allFields = schema.getFields(); + for (const field of allFields) { + const key = `${schema.name}|${field.path}|${field.description}|${field.type}|${field.isArray}|${field.isRef}`; + fieldMap.set(key, schema.iri); + } + } + + const 
schemaMap = new Map(); + const variables = data.variables; + const rules = data.rules; + + if (Array.isArray(variables)) { + for (const variable of variables) { + const key = `${variable.schemaName}|${variable.path}|${variable.fieldDescription}|${variable.fieldType}|${variable.fieldArray}|${variable.fieldRef}`; + schemaMap.set(variable.schemaId, fieldMap.get(key)); + } + } + + if (Array.isArray(variables)) { + for (const variable of variables) { + variable.schemaId = schemaMap.get(variable.schemaId); + } + } + + if (Array.isArray(rules)) { + for (const rule of rules) { + rule.schemaId = schemaMap.get(rule.schemaId); + } + } + + return data; + } + /** * Validate Config * diff --git a/common/src/import-export/policy.ts b/common/src/import-export/policy.ts index 5b503b5c5..1fa950632 100644 --- a/common/src/import-export/policy.ts +++ b/common/src/import-export/policy.ts @@ -1,5 +1,5 @@ import JSZip from 'jszip'; -import { Artifact, Policy, PolicyCategory, PolicyTest, PolicyTool, Schema, Tag, Token } from '../entity/index.js'; +import { Artifact, Policy, PolicyCategory, PolicyTool, Schema, Tag, Token } from '../entity/index.js'; import { DatabaseServer } from '../database-modules/index.js'; import { ImportExportUtils } from './utils.js'; import { PolicyCategoryExport } from '@guardian/interfaces'; diff --git a/docs/.gitbook/assets/Climate Action Reserve Workflow (3).png b/docs/.gitbook/assets/Climate Action Reserve Workflow (3).png new file mode 100644 index 000000000..f188ae264 Binary files /dev/null and b/docs/.gitbook/assets/Climate Action Reserve Workflow (3).png differ diff --git a/docs/.gitbook/assets/image (696).png b/docs/.gitbook/assets/image (696).png new file mode 100644 index 000000000..fba0de575 Binary files /dev/null and b/docs/.gitbook/assets/image (696).png differ diff --git a/docs/.gitbook/assets/image (697).png b/docs/.gitbook/assets/image (697).png new file mode 100644 index 000000000..9f7747fe5 Binary files /dev/null and b/docs/.gitbook/assets/image 
(697).png differ diff --git a/docs/.gitbook/assets/image (698).png b/docs/.gitbook/assets/image (698).png new file mode 100644 index 000000000..19b6faf62 Binary files /dev/null and b/docs/.gitbook/assets/image (698).png differ diff --git a/docs/.gitbook/assets/image (699).png b/docs/.gitbook/assets/image (699).png new file mode 100644 index 000000000..4abb76f69 Binary files /dev/null and b/docs/.gitbook/assets/image (699).png differ diff --git a/docs/.gitbook/assets/image (700).png b/docs/.gitbook/assets/image (700).png new file mode 100644 index 000000000..fe568688c Binary files /dev/null and b/docs/.gitbook/assets/image (700).png differ diff --git a/docs/.gitbook/assets/image (701).png b/docs/.gitbook/assets/image (701).png new file mode 100644 index 000000000..926027202 Binary files /dev/null and b/docs/.gitbook/assets/image (701).png differ diff --git a/docs/.gitbook/assets/image (702).png b/docs/.gitbook/assets/image (702).png new file mode 100644 index 000000000..262b2f536 Binary files /dev/null and b/docs/.gitbook/assets/image (702).png differ diff --git a/docs/.gitbook/assets/image (703).png b/docs/.gitbook/assets/image (703).png new file mode 100644 index 000000000..c7e4622ca Binary files /dev/null and b/docs/.gitbook/assets/image (703).png differ diff --git a/docs/.gitbook/assets/image (704).png b/docs/.gitbook/assets/image (704).png new file mode 100644 index 000000000..df21109d0 Binary files /dev/null and b/docs/.gitbook/assets/image (704).png differ diff --git a/docs/.gitbook/assets/image (705).png b/docs/.gitbook/assets/image (705).png new file mode 100644 index 000000000..453e5290e Binary files /dev/null and b/docs/.gitbook/assets/image (705).png differ diff --git a/docs/.gitbook/assets/image (706).png b/docs/.gitbook/assets/image (706).png new file mode 100644 index 000000000..cf207e85f Binary files /dev/null and b/docs/.gitbook/assets/image (706).png differ diff --git a/docs/.gitbook/assets/image (707).png b/docs/.gitbook/assets/image (707).png new 
file mode 100644 index 000000000..323ababd7 Binary files /dev/null and b/docs/.gitbook/assets/image (707).png differ diff --git a/docs/.gitbook/assets/image (708).png b/docs/.gitbook/assets/image (708).png new file mode 100644 index 000000000..030662bfc Binary files /dev/null and b/docs/.gitbook/assets/image (708).png differ diff --git a/docs/.gitbook/assets/image (709).png b/docs/.gitbook/assets/image (709).png new file mode 100644 index 000000000..554dfce65 Binary files /dev/null and b/docs/.gitbook/assets/image (709).png differ diff --git a/docs/.gitbook/assets/image (710).png b/docs/.gitbook/assets/image (710).png new file mode 100644 index 000000000..13998ff9d Binary files /dev/null and b/docs/.gitbook/assets/image (710).png differ diff --git a/docs/.gitbook/assets/image (711).png b/docs/.gitbook/assets/image (711).png new file mode 100644 index 000000000..e18bab4e7 Binary files /dev/null and b/docs/.gitbook/assets/image (711).png differ diff --git a/docs/.gitbook/assets/image (712).png b/docs/.gitbook/assets/image (712).png new file mode 100644 index 000000000..744074c37 Binary files /dev/null and b/docs/.gitbook/assets/image (712).png differ diff --git a/docs/.gitbook/assets/image (713).png b/docs/.gitbook/assets/image (713).png new file mode 100644 index 000000000..8714d2d04 Binary files /dev/null and b/docs/.gitbook/assets/image (713).png differ diff --git a/docs/.gitbook/assets/image (714).png b/docs/.gitbook/assets/image (714).png new file mode 100644 index 000000000..4025b4a1a Binary files /dev/null and b/docs/.gitbook/assets/image (714).png differ diff --git a/docs/.gitbook/assets/image (715).png b/docs/.gitbook/assets/image (715).png new file mode 100644 index 000000000..9b5b6c5b4 Binary files /dev/null and b/docs/.gitbook/assets/image (715).png differ diff --git a/docs/.gitbook/assets/image (716).png b/docs/.gitbook/assets/image (716).png new file mode 100644 index 000000000..e11db349a Binary files /dev/null and b/docs/.gitbook/assets/image (716).png 
differ diff --git a/docs/.gitbook/assets/image (717).png b/docs/.gitbook/assets/image (717).png new file mode 100644 index 000000000..29d06bbbf Binary files /dev/null and b/docs/.gitbook/assets/image (717).png differ diff --git a/docs/.gitbook/assets/image (718).png b/docs/.gitbook/assets/image (718).png new file mode 100644 index 000000000..832f4d727 Binary files /dev/null and b/docs/.gitbook/assets/image (718).png differ diff --git a/docs/.gitbook/assets/image (719).png b/docs/.gitbook/assets/image (719).png new file mode 100644 index 000000000..3e10cdb04 Binary files /dev/null and b/docs/.gitbook/assets/image (719).png differ diff --git a/docs/.gitbook/assets/image (720).png b/docs/.gitbook/assets/image (720).png new file mode 100644 index 000000000..512bd747c Binary files /dev/null and b/docs/.gitbook/assets/image (720).png differ diff --git a/docs/.gitbook/assets/image (721).png b/docs/.gitbook/assets/image (721).png new file mode 100644 index 000000000..78b3db545 Binary files /dev/null and b/docs/.gitbook/assets/image (721).png differ diff --git a/docs/.gitbook/assets/image (722).png b/docs/.gitbook/assets/image (722).png new file mode 100644 index 000000000..ae46b516e Binary files /dev/null and b/docs/.gitbook/assets/image (722).png differ diff --git a/docs/.gitbook/assets/image (723).png b/docs/.gitbook/assets/image (723).png new file mode 100644 index 000000000..566d324f9 Binary files /dev/null and b/docs/.gitbook/assets/image (723).png differ diff --git a/docs/.gitbook/assets/image (724).png b/docs/.gitbook/assets/image (724).png new file mode 100644 index 000000000..512bd747c Binary files /dev/null and b/docs/.gitbook/assets/image (724).png differ diff --git a/docs/.gitbook/assets/image (725).png b/docs/.gitbook/assets/image (725).png new file mode 100644 index 000000000..78b3db545 Binary files /dev/null and b/docs/.gitbook/assets/image (725).png differ diff --git a/docs/.gitbook/assets/image (726).png b/docs/.gitbook/assets/image (726).png new file mode 
100644 index 000000000..c5e8e1d2e Binary files /dev/null and b/docs/.gitbook/assets/image (726).png differ diff --git a/docs/.gitbook/assets/image (727).png b/docs/.gitbook/assets/image (727).png new file mode 100644 index 000000000..72095dc81 Binary files /dev/null and b/docs/.gitbook/assets/image (727).png differ diff --git a/docs/.gitbook/assets/image (728).png b/docs/.gitbook/assets/image (728).png new file mode 100644 index 000000000..2f8e3af66 Binary files /dev/null and b/docs/.gitbook/assets/image (728).png differ diff --git a/docs/.gitbook/assets/image (729).png b/docs/.gitbook/assets/image (729).png new file mode 100644 index 000000000..aaf6929f0 Binary files /dev/null and b/docs/.gitbook/assets/image (729).png differ diff --git a/docs/.gitbook/assets/image (730).png b/docs/.gitbook/assets/image (730).png new file mode 100644 index 000000000..5cbf37ab7 Binary files /dev/null and b/docs/.gitbook/assets/image (730).png differ diff --git a/docs/.gitbook/assets/image (731).png b/docs/.gitbook/assets/image (731).png new file mode 100644 index 000000000..97c17ec2a Binary files /dev/null and b/docs/.gitbook/assets/image (731).png differ diff --git a/docs/.gitbook/assets/image (732).png b/docs/.gitbook/assets/image (732).png new file mode 100644 index 000000000..7b78047dc Binary files /dev/null and b/docs/.gitbook/assets/image (732).png differ diff --git a/docs/.gitbook/assets/image (733).png b/docs/.gitbook/assets/image (733).png new file mode 100644 index 000000000..bcde7e55e Binary files /dev/null and b/docs/.gitbook/assets/image (733).png differ diff --git a/docs/.gitbook/assets/image (734).png b/docs/.gitbook/assets/image (734).png new file mode 100644 index 000000000..bdf967493 Binary files /dev/null and b/docs/.gitbook/assets/image (734).png differ diff --git a/docs/.gitbook/assets/image (735).png b/docs/.gitbook/assets/image (735).png new file mode 100644 index 000000000..3097a0022 Binary files /dev/null and b/docs/.gitbook/assets/image (735).png differ 
diff --git a/docs/.gitbook/assets/image (736).png b/docs/.gitbook/assets/image (736).png new file mode 100644 index 000000000..609559247 Binary files /dev/null and b/docs/.gitbook/assets/image (736).png differ diff --git a/docs/.gitbook/assets/image (737).png b/docs/.gitbook/assets/image (737).png new file mode 100644 index 000000000..ef3faff35 Binary files /dev/null and b/docs/.gitbook/assets/image (737).png differ diff --git a/docs/.gitbook/assets/image (738).png b/docs/.gitbook/assets/image (738).png new file mode 100644 index 000000000..77c4067c1 Binary files /dev/null and b/docs/.gitbook/assets/image (738).png differ diff --git a/docs/.gitbook/assets/image (739).png b/docs/.gitbook/assets/image (739).png new file mode 100644 index 000000000..c460712c9 Binary files /dev/null and b/docs/.gitbook/assets/image (739).png differ diff --git a/docs/.gitbook/assets/image (740).png b/docs/.gitbook/assets/image (740).png new file mode 100644 index 000000000..af99282f7 Binary files /dev/null and b/docs/.gitbook/assets/image (740).png differ diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index 889413a2d..a2019d351 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -161,7 +161,7 @@ * [Delete the rule by its ID](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/delete-the-rule-by-its-id.md) * [Activate the rule with the specified ID](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/activate-the-rule-with-the-specified-id.md) * [Deactivate the rule with the specified ID](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/deactivate-the-rule-with-the-specified-id.md) - * [List all the schemas and policy relevant to the rule with the specified ID](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/list-all-the-schemas-and-policy-relevant-to-the-rule-with-the-specified-id.md) + * [List all the schemas and policy relevant to the rule with the specified 
ID](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/list-all-the-schemas-and-policy-relevant-to-the-rule-with-the-specified-id.md) * [Retrieve all the data needed for evaluating the rules](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/retrieve-all-the-data-needed-for-evaluating-the-rules.md) * [Create a new rule from the file](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/create-a-new-rule-from-the-file.md) * [Load the file and return its preview](guardian/standard-registry/schemas/schema-rules/apis-related-to-schema-rules/load-the-file-and-return-its-preview.md) @@ -704,6 +704,7 @@ * [🏭 Atma Scope 3 GHG Policy](guardian/demo-guide/carbon-emissions/atma-scope-3-ghg-policy.md) * [🏭 GHGP Corporate Standard](guardian/demo-guide/carbon-emissions/ghgp-corporate-standard.md) * [🏭 GHGP Corporate Standard V2](guardian/demo-guide/carbon-emissions/ghgp-corporate-standard-v2.md) + * [Climate Action Reserve’s U.S. Landfill Protocol Version 6.0](guardian/demo-guide/carbon-emissions/climate-action-reserves-u.s.-landfill-protocol-version-6.0.md) * [❓ FAQs](guardian/faqs.md) * [👬 Community Standards](guardian/community-standards/README.md) * [Guardian Policy Standards (GPS)](guardian/community-standards/guardian-policy-standards-gps.md) diff --git a/docs/guardian/demo-guide/carbon-emissions/climate-action-reserves-u.s.-landfill-protocol-version-6.0.md b/docs/guardian/demo-guide/carbon-emissions/climate-action-reserves-u.s.-landfill-protocol-version-6.0.md new file mode 100644 index 000000000..b802a784f --- /dev/null +++ b/docs/guardian/demo-guide/carbon-emissions/climate-action-reserves-u.s.-landfill-protocol-version-6.0.md @@ -0,0 +1,204 @@ +--- +icon: mountain +--- + +# Climate Action Reserve’s U.S. 
Landfill Protocol Version 6.0 + +## Table of Contents + +[Introduction](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc1569113010) + +[Need and Use](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312046) + +[Monitoring and Quantification Approach](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312047) + +[Project Eligibility and Additionality](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312048) + +[Project Type](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312049) + +[Demo Video](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc80095333) + +[Policy Workflow](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312050) + +[Policy Import](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312051) + +[Available Roles](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312052) + +[Important Schemas](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312053) + +[Token (Climate Reserve Tonnes - CRTs)](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312054) + +[Step By Step](climate-action-reserves-u.s.-landfill-protocol-version-6.0.md#toc182312055) + +## **Introduction** + +The U.S. Landfill Protocol, developed by the Climate Action Reserve, is a standardized framework for quantifying, reporting, and verifying greenhouse gas (GHG) emission reductions from methane capture and destruction projects at U.S. landfills. Approved by the Integrity Council for the Voluntary Carbon Market (ICVCM), this protocol ensures that emission reductions meet high standards of environmental integrity, credibility, and transparency. This ICVCM approval reinforces the protocol’s alignment with global best practices and its credibility in voluntary carbon markets. + +## **Need and Use** + +The U.S. 
Landfill Protocol supports project developers in creating projects that effectively reduce emissions from landfill sites. It includes a framework for calculating and verifying emission reductions generated by capturing methane, a potent greenhouse gas, from landfills and using or destroying it. This protocol helps landfill operators and project developers earn credits that can be used to offset emissions in compliance and voluntary carbon markets. + +## **Monitoring and Quantification Approach** + +The U.S. Landfill Protocol specifies detailed requirements for calculating baseline and project emissions to assess net GHG emission reductions. Key components include: + +* **Methane Capture and Destruction**: Continuous monitoring of landfill gas (LFG) collection and destruction is required, using calibrated equipment to measure LFG flow and methane concentration, ensuring accurate quantification of emissions destroyed. +* **Baseline Emissions**: Baseline emissions are calculated based on the methane emissions that would have occurred without the project, minus any methane oxidized by soil bacteria or destroyed by any existing destruction devices. These calculations use specific discount factors for oxidation and other adjustments according to landfill characteristics and monitoring frequency. +* **Project Emissions**: Project emissions include any emissions from the energy used to operate the landfill gas collection and destruction systems, as well as emissions from fossil fuels or supplemental energy sources. +* **Emission Reductions**: Net emission reductions are calculated by subtracting project emissions from baseline emissions, providing a clear assessment of GHG reductions achieved through methane destruction. + +This structured approach allows for consistent, transparent, and conservative reporting of GHG emission reductions in line with the Climate Action Reserve's requirements. 
+ +## **Project Eligibility and Additionality** + +Projects must meet specific eligibility requirements to qualify under this protocol: + +* **Location**: Projects must be located at landfills in the U.S., including tribal lands and territories. +* **Project Start Date**: The project start date should be defined by the first instance of landfill gas destruction, with project submission required within 12 months of this date. +* **Additionality** is demonstrated through the **performance standard test** (based on baseline conditions) and the **legal requirement test** (ensuring no regulatory mandates already require methane destruction). Only landfill projects that collect and destroy methane beyond regulatory requirements qualify for additionality, ensuring that GHG reductions are surplus to business-as-usual scenarios. + +## **Project Type** + +This protocol applies to projects that aim to reduce methane emissions from U.S. landfills by capturing and utilizing or destroying methane gas. Only projects that meet the Climate Action Reserve’s eligibility criteria and have not issued credits under other GHG programs are eligible. + +## **Demo Video** + +[**Youtube**](https://www.youtube.com/watch?v=fDTobFguBJE) + +## **Policy Workflow** + +The policy workflow replicates the procedural steps required for landfill gas projects, covering project registration, additionality demonstration, and the submission of monitoring reports to claim carbon credits. + +
+ +## **Policy Import** + +This policy is available for import via GitHub or IPFS timestamp. + +## **Available Roles** + +1. **Project Developer**: Responsible for overall project management, documentation submission, and coordination with verification bodies. +2. **Verifier(s)**: Independent third-party verifiers who assess project data and ensure compliance with protocol requirements. +3. **Standard Registry**: The governing body that maintains project records, manages reporting, and approves credit issuance. + +## **Important Schemas** + +* **Project Submission Form:** This form provides a comprehensive overview of the project, including landfill site details, baseline emissions, monitoring plans, and stakeholder consultations. It serves as the foundational document for project registration with the Climate Action Reserve. +* **Verification Report:** Submitted by an independent third-party verifier, this report evaluates the accuracy of the emissions reductions claimed by the project. It includes findings on project compliance, monitoring data, and any identified discrepancies or recommendations for improvement. +* **Emission Reductions Report:** This document provides a summary of the emission reductions accomplished by the project. It details the methodologies used, calculations used, and calculation results. +* **Attestation of Title Form:** Signed by the Project Developer, this form certifies ownership of the project’s GHG reductions. Clear ownership documentation ensures that CRTs are issued only to entities with verified rights to claim these reductions. +* **Attestation of Voluntary Implementation:** This attestation is signed by the Project Developer to confirm that the project’s landfill gas collection and destruction activities go beyond any regulatory requirements. It is submitted prior to verification, ensuring that GHG reductions are genuinely additional to what would have occurred under existing legal mandates. 
+* **Attestation of Regulatory Compliance:** This attestation confirms that the project complies with all applicable federal, state, and local laws and regulations (e.g., environmental, safety). The Project Developer must disclose any instances of non-compliance to the verifier, who assesses the impact on credit issuance.
+* **Environmental Safeguards:** This schema ensures that project activities align with environmental best practices, minimizing unintended impacts on local ecosystems, air, water, and soil quality. The Project Developer must document steps taken to prevent adverse environmental effects, such as controlling potential pollutants, managing resource use, and protecting nearby habitats. This schema may also include periodic environmental impact assessments as part of ongoing compliance and verification.
+
+## **Token (Climate Reserve Tonnes - CRTs)**
+
+Each CRT represents one metric tonne of carbon dioxide equivalent (tCO₂e) reduced or avoided through the approved project activities.
+
+## **Step By Step**
+
+1. Import the policy using IPFS or Policy File. Once imported, you will be redirected to the policy configurator.
+
+
+ +
+ +
+ +2. Set the policy to Dry Run or Publish it using the dropdown. Then select “Go” or “Register”. + +
+ +
+ +3. Create a new user account and assign their role as the Project Developer. + +
+ +
+ +
+ +
+ +4. Log in to the administrator account to review and approve the Project Developer account. + +
+ +5. The Project Developer can click 'Create Project' to submit the Project Submission Form. + +
+ +
+ +6. The Project Developer can also submit a Project Diagram. + +
+ +
+ +7. Return to the administrator account to review the Project Submission Form and the Project Diagram for completeness and approve both submissions. + +
+ +
+ +8. Now, we will create a new user to add a verifier account. + +
+ +
+ +
+ +
+ +9. Log in to the Administrator account to review and approve the Verifier account. + +
+ +10. The Verifier must complete a NOVA/COI form and submit it to the Administrator. + +
+ +11. Log in to the Administrator account and approve the NOVA/COI form. This policy also allows the Administrator to identify a conflict of interest (COI) as outlined in the workflow above. + +
+ +12. Once the NOVA/COI form has been approved with no COIs identified you can log in to the Project Developer account and assign a Verifier to the project. This will allow the verifier to see the project submission form. + +
+ +
+ +
+ +13. The Project Developer can submit all the forms outlined in the project documents tab, assigning the third-party verifier to each form. + +
+ +
+ +14. Using the assigned Verifier account, they can now review and approve all the project documents. Once approved they can complete and submit the Verification Report. + +
+ +
+ +
+ +15. The Administrator must review the report for completeness and choose to approve or reject. + +
+ +16. The final step before issuing credits would be to approve the Emission Reductions from the Administrators account. + +
+ +17. The project owner and administrator will have access to the Verifiable Presentation (VP) and Trust Chain. + +
+ +
+ +
diff --git a/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard-v2.md b/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard-v2.md index b8b6ec0f5..f9744d540 100644 --- a/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard-v2.md +++ b/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard-v2.md @@ -26,33 +26,55 @@ [Futureproofing (Automated GHG Inventories)](ghgp-corporate-standard-v2.md#toc146717959) -[TODO](ghgp-corporate-standard-v2.md#toc146717960) - ### Introduction -The GHG Protocol Corporate Accounting and Reporting Standard (GHGP Corporate Standard) is the world’s leading standard outlining requirements and guidance for corporate-level and organizational-level GHG emission inventories. As of 2016, approximately 92% of Fortune 500 companies responding to the CDP—an investor-led effort to increase corporate carbon disclosures—referenced the used the GHGP Corporate Standard to conduct their GHG inventories.\[1] Also, many other GHG-related standards—such as the Natural Capital Partner’s CarbonNeutral Protocol and the Science Based Targets Initiative (SBTi)—point to the Greenhouse Gas Protocol as the default standard for the quantification and accounting of corporate GHG emissions. As future regulations and standards are developed and implemented, they are likely to either prescribe or encourage the use of Greenhouse Gas Protocol standards. +The GHG Protocol Corporate Accounting and Reporting Standard (GHGP Corporate Standard) is the world’s leading standard outlining requirements and guidance for corporate-level and organizational-level GHG emission inventories. 
Approximately 92% of Fortune 500 companies responding to the CDP—an investor-led effort to increase corporate carbon disclosures—referenced or used the GHGP Corporate Standard to conduct their GHG inventories.\[1] Also, many other GHG-related standards—such as the Natural Capital Partner’s CarbonNeutral Protocol and the Science Based Targets Initiative (SBTi)—point to the Greenhouse Gas Protocol as the commonplace standard for the quantification and accounting of corporate GHG emissions. As future regulations and standards are developed and implemented, they may either prescribe or encourage the use of Greenhouse Gas Protocol standards.

+This Guardian Policy mints Carbon Emission Tokens (CETs) in accordance with the GHGP Corporate Standard, including the Scope 2 Guidance, which was later published as an amendment to the GHGP Corporate Standard. In addition, the policy includes functionality to attribute emissions to products and services and use this data to calculate and publish product carbon footprints (PCFs) in accordance with the Pathfinder Framework v2.0. The policy and methodologies are designed to calculate emissions based on MRV data that can either be input manually by the organization, or automatically through API and trusted external data sources. The policy is equipped with standard emission factors (such as eGRID emission rates) and Intergovernmental Panel on Climate Change (IPCC) global warming potentials (GWPs).

+The policy currently covers the following sources and future versions will have the ability to add new modulated source categories and custom source schemas.

+Included Sources:

+\- Scope 1: Stationary Combustion

+\- Scope 1: Mobile Combustion

+\- Scope 1: Refrigerants

-This Guardian Policy mints Carbon Emission Tokens (CETs) in accordance with the GHGP Corporate Standard, including the Scope 2 Guidance, which was later published as an amendment to the GHGP Corporate Standard. 
The policy and methodologies are designed to calculate emissions based on MRV data that can either be provided manually by the organization, or automatically sourced from devices such as IoT-enabled electricity meters. The policy is equipped with standard emission factors (such as eGRID emission rates) and Intergovernmental Panel on Climate Change (IPCC) global warming potentials (GWPs). +\- Scope 2: Purchased Electricity (Location-Based) -The policy currently covers emissions from grid electricity consumption (location and market-based calculations), natural gas consumption, mobile combustion, and refrigerants. The policy is designed to be dynamic, allowing organizations to assign entities to organizations, assets/emission generating objects (EGOs) to entities, and GHG sources to assets/EGOs in a hierarchical structure to dynamically tailor the policy and inventory to specific corporate structures and operations. +\- Scope 2: Purchased Electricity (Market-Based) + +\- Scope 3.1: Purchased Goods and Services + +\- Scope 3.4: Upstream Transportation and Distribution + +The policy is designed to be dynamic, allowing companies to assign entities to organizations, assets(facilities, vehicles, equipment, etc.) to entities, and GHG sources to assets/EGOs in a hierarchical structure to dynamically tailor the policy and inventory to specific corporate structures and operations. ### Need and Use for the GHGP Corporate Standard Policy -According to the IPCC, in order to avoid potentially irreversible impacts of climate change, global GHG emissions should be reduced by approximately 45% by 2030 (relative to 2010 levels) and achieve net zero by around 2050. Therefore, it comes as no surprise that the largest companies in the world are increasingly aligning their GHG reduction targets with the latest scientific models, in an effort to both exhibit their commitment to sustainability, as well as to remain viable in a low-carbon future. 
The number of companies working with the Science Based Targets initiative (SBTi) has increased nearly 1,900% between 2015 and 2020, with 1,039 cumulatively committed companies representing nearly 20% of global market capitalization (over $20.5 trillion USD). +According to the IPCC, in order to avoid potentially irreversible impacts of climate change, global GHG emissions should be reduced by approximately 45% by 2030 (relative to 2010 levels) and achieve net zero by around 2050. Therefore, it comes as no surprise that many of the largest companies in the world are increasingly aligning their GHG reduction targets with the latest scientific studies, in an effort to both exhibit their commitment to sustainability, as well as to remain viable in a low-carbon future. The number of companies working with the Science Based Targets initiative (SBTi) has increased nearly 1,900% between 2015 and 2020, with 1,039 cumulatively committed companies representing nearly 20% of global market capitalization (over $20.5 trillion USD). + +In addition to momentum in voluntary GHG commitments, there are several new regulations requiring the measuring and reporting of GHG emissions (outlined below): + +The Securities and Exchange Commission (SEC) Climate Disclosure Rules: On March 6th, 2024, the SEC adopted rules to enhance and standardize climate-related disclosures by public companies and in public offerings. The rules will require registrants to disclose climate-related risks, targets, mitigating strategic efforts, and \[for large, accelerated filers (LAFs) and accelerated filers (AFs) that are not otherwise exempted] scope 1 and 2 GHG emissions. + +The European Union Corporate Sustainability Reporting Directive (CSRD): On January 5th, 2023, the EU’s Corporate Sustainability Reporting Directive (CSRD) took effect, strengthening existing rules on social and environmental reporting \[including climate risks and impacts]. 
The rules will now apply to a broader set of companies, as well as non-EU companies generating over EUR 150 million on the EU market.

+California Climate Corporate Data Accountability Act: On Oct. 7th, 2023, California Gov. Gavin Newsom signed into law California’s Climate Corporate Data Accountability Act, requiring corporations that do business in California, with annual revenues over $1 billion, to publicly disclose scope 1 and scope 2 GHG emissions beginning in 2026, and scope 3 emissions in 2027.

-Despite a growing interest in measuring, disclosing, and reducing GHG emissions from corporations, regulators, and investors alike, companies are struggling to accurately measure and report emissions. In general, current quantification methodologies are flawed, GHG accounting standards leave significant room for error, access to quality data is low, and there is a prevailing lack of GHG accounting expertise. As a result, high-profile companies have been exposed for incorrect GHG inventories and worse, misleading claims on carbon performance. 
According to an article by Bloomberg, ‘Corporate Greenhouse Gas Data Doesn’t Always Add Up,’ “As companies rush to set climate goals, some aren’t even getting the basics quite right when it comes to accounting for greenhouse-gas emissions. According to researchers in Ireland, the U.K. and Germany, based on decade’s worth of corporate emissions data, “when the numbers were tallied, many didn’t add up. They found instances of errors, omissions and rounding issues (often down rather than up).” +Despite a growing interest in measuring, disclosing, and reducing GHG emissions from corporations, regulators, and investors alike, companies are struggling to accurately measure and report emissions. In general, current quantification methodologies are flawed, GHG accounting standards leave significant room for error, access to quality data is low, and there is a prevailing lack of GHG accounting expertise. As a result, high-profile companies have been exposed for incorrect GHG inventories and worse, misleading claims on carbon performance. According to an article by Bloomberg, ‘Corporate Greenhouse Gas Data Doesn’t Always Add Up,’ “As companies rush to set climate goals, some aren’t even getting the basics quite right when it comes to accounting for greenhouse-gas emissions. According to researchers in Ireland, the U.K. and Germany, based on decade’s worth of corporate emissions data, “when the numbers were tallied, many didn’t add up. They found instances of errors, omissions and rounding issues (often down rather than up).” -The Guardian GHGP Corporate Policy offers a unique technical opportunity for companies to streamline, add robustness, and build trust and transparency into their GHG inventories. The policy allows user to dynamically add entities and EGOs to organizations and GHG sources to EGOs to build their inventories in alignment with their specific corporate structures. 
MRV data can then be sourced by the Guardian automatically (e.g., via API, IoT-enabled devices, etc.) or provided manually depending on the user’s level of digitization. The inventory is further streamlined through Guardian policies with built in auto-calculation blocks, emission factors, and GWPs. The results of the inventory can be immutably and transparently verified by third parties. Finally, the emissions are tokenized to allow for enhanced tracking, transparency, accounting, and reporting, with the results and data structured in accordance with GHGP reporting requirements. +The Guardian GHGP Corporate Policy offers a unique technical opportunity for companies to streamline, add robustness, and build trust and transparency into their GHG inventories. The policy allows users to dynamically add entities and assets to organizations and GHG sources to assets to build their inventories in alignment with their specific corporate and operational structures. MRV data can then be sourced by the Guardian automatically (e.g., via API, IoT-enabled devices, etc.) or provided manually depending on the user’s level of digitization. The inventory is further streamlined through Guardian policies with built in auto-calculation blocks, emission factors, and GWPs. The results of the inventory can be immutably and transparently verified by independent third parties. Finally, the emissions are tokenized to allow for enhanced tracking, transparency, accounting, and reporting, with the results and data structured in accordance with GHGP reporting requirements. ### Policy Workflow -
+
### Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. Latest Version - 1707206253.006698003 @@ -62,16 +84,17 @@ Latest Version - 1707206253.006698003 **Organization** – Company or other organization generating, quantifying, and reporting GHG emissions. The organization also provides MRV data and receives CETs. -**VVB (Validation & Verification Body)** – Independent third party who audits organization’s critical documentation, MRV data and sources, and GHG inventories. Verification is optional for this policy as it is optional under the GHGP Corporate Standard. The workflow steps involving the VVBs will not block the subsequent steps or the minting of CETs, therefore they can be 1) executed according to the workflow above, 2) skipped, or 3) executed later in the workflow. +**VVB (Validation & Verification Body)** – Independent third party who audits organization’s critical documentation, MRV data and sources, and GHG inventories. Verification is optional for this policy as it is (as of this writing) optional under the GHGP Corporate Standard. The workflow steps involving the VVBs will not block the subsequent steps or the minting of CETs, therefore they can be 1) executed according to the workflow above, 2) skipped, or 3) executed later in the workflow. ### Important Documents & Schemas 1. Organizational Profile – The company or organization creates a profile of key information, targets, and reporting metrics. Entities (such as business units, subsidiaries, etc.) are assigned to the company or organization. -2. Entity Schema – The company profiles with key information for each entity. Assets and EGOs (such as facilities, vehicles, etc.) are assigned to entities. Together, the entities make up the corporate structure by which the inventory is based. +2. 
Entity Schema – The company profiles with key information for each entity. Assets (such as facilities, vehicles, etc.) are assigned to entities. Together, the entities make up the corporate structure by which the inventory is based. 3. Asset Schema — Information on company assets provided by the organization and GHG sources (such grid electricity, fuel consumption, etc.) are assigned to assets. 4. Source Schema — Aggregated activity data (such electrical consumption, fuel consumption, etc.) which are used to auto-calculate GHG emissions. 5. Raw Data Schema — Raw activity data sourced manually or automatically from APIs or devices such as IoT-enabled electricity meters. 6. Reporting Metrics — Key metrics to support reporting in alignment with the GHGP Corporate Standard. +7. Product Carbon Footprint (PCF): Data fields outlined by the Pathfinder Framework v 2.0. The PCF is publishable and can be referenced by supply chain partners to support improved scope 3 calculations. ### Token (Carbon Emission) @@ -101,11 +124,11 @@ The Organization is responsible for inputting key data and information and assig
-5. If the company or organization has already set GHG reduction targets, add them here and input the required and applicable details. It is generally best practice to set targets in alignment with the Science Based Targets Initiative (SBTi). +5. If the company or organization has already set GHG reduction targets, add them here and input the required and applicable details. It is generally best practice to set targets in alignment with the Science Based Targets initiative (SBTi).
-6. For each organizational entity, add all assets/EGOs that generate emissions (e.g., facilities, fleet vehicles, etc.). +6. For each organizational entity, add all assets that generate emissions (e.g., facilities, fleet vehicles, etc.).
@@ -113,12 +136,14 @@ The Organization is responsible for inputting key data and information and assig
-7. For each asset/EGO, add all applicable GHG sources and input the required and applicable fields. For example, common GHG sources for facilities are electricity, natural gas, and refrigerant consumption +7. For each asset, add all applicable GHG sources and input the required and applicable fields. For example, common GHG sources for facilities are electricity, natural gas, and refrigerant consumption
+
+ 8. For each GHG source, enter activity data such as electricity consumption (kWh) for facilities or distance traveled (miles) for vehicles. MRV data can be provided manually by the organization, or sourced automatically from APIs or verified monitoring devices such as IoT-enabled meters that are assigned to specific GHG sources. During this step, market-based instruments such as Renewable Energy Certificates (RECs) can be allocated to applicable GHG sources.
@@ -135,33 +160,31 @@ The Organization is responsible for inputting key data and information and assig
-10. Under the Token History tab, the user can view the Trust Chain, where all the collective VCs can be viewed. - -
+10. Once the reporting metrics have been calculated, a VVB can be assigned, and the metrics can be submitted for verification. This step is currently optional. -
+
-### VVB Flow +11. If applicable, the organization can create and publish digital PCFs in alignment with the Pathfinder Framework v2.0. The PCF is based on all emissions attributed to a specific product/service IDs and made relative to the declared unit. -1. Assign role as VVB and add VVB name +
-
+
-
+Supply chain partners can reference the PCF to support their scope 3 calculations. -2. All the GHG resources once viewed can be verified. +
-
+12. Under the Token History tab, the user can view the Trust Chain, where all the collective VCs can be viewed. -
+
-
+
-
+### VVB Flow -3. After verification of GHG resources, VVB reviews reporting metrics and approve/reject. +1. Once the organization assigns a VVB, the VVB will be able to approve or reject MRV data and GHG sources. The VVB will also be able to approve or reject the GHG inventory reporting metrics after it’s calculated by the Guardian. These steps are optional and can be skipped or executed later in the workflow. -
+
### Administrator (Registry) @@ -169,18 +192,10 @@ The Organization is responsible for inputting key data and information and assig
-2. Once it is approved, tokens are minted as shown: - -
- ### Futureproofing (Automated GHG Inventories) -Due to several factors such as lack of expertise, absent third-party assurance, and methodologies that leave significant room for error, corporate GHG inventories are often inaccurate and unreliable. In addition, manually collecting monitoring and activity data each year can be a cumbersome task. By automating and digitizing the collection of monitoring data, GHG quantification calculations, and (optionally) third-party verification of devices, data, and calculations, GHG inventories can be automated and streamlined to enhance trust, transparency, and efficiency. - -### TODO - -The policy currently covers emissions from grid electricity consumption (location and market-based calculations), natural gas consumption, mobile combustion, and refrigerants. In future iterations of the policy, GHG source schemas can be modulated, and new types of data sources can be added as necessary. The policy is designed to be dynamic, allowing organizations to assign entities to organizations, assets/EGOs to entities, and GHG sources to assets/EGOs in a hierarchical structure to dynamically tailor the inventory to specific corporate structures and operations. This aspect may also be modulated in future iterations of the policy. +In future iterations of the policy, GHG source schemas can be modulated, and new types of data sources can be added as necessary. In addition, new GHG source categories will be added, as well as an option to add custom source schemas and emission factors. These may be added to the Guardian policy in the form of policy modules. -The initial version of the GHGP Corporate Policy is includes schemas for some of the most common assets/EGOs (facilities and vehicles) and GHG sources (scope 1 - natural gas consumption, scope 1 - refrigerants, scope 1 - mobile combustion of fuel, and scope 2 - electrical consumption). 
However, there are other potential GHG sources that may be applicable to specific corporate GHG inventories. Further, scope 3 emissions are currently optional under the GHGP Corporate Standard, although companies may choose to include them as well. Going forward, schemas may be added for additional GHG sources and scope 3 GHG sources. These may be added to the Guardian policy in the form of policy modules. +The policy is designed to be dynamic, allowing organizations to assign entities to organizations, assets to entities, and GHG sources to assets in a hierarchical structure to dynamically tailor the inventory to specific corporate structures and operations. This aspect may also be modulated in future iterations of the policy. -1. [https://ghgprotocol.org/companies-and-organizations](https://ghgprotocol.org/companies-and-organizations) ↑ +GHGP v3 will include an optional tool to manage disclosures to the SEC in alignment with their Climate Disclosure Rules. diff --git a/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard.md b/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard.md index 8c654a970..e83f964da 100644 --- a/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard.md +++ b/docs/guardian/demo-guide/carbon-emissions/ghgp-corporate-standard.md @@ -60,7 +60,7 @@ The Guardian GHGP Corporate Policy offers a unique technical opportunity for com ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. 
Latest Version - 1695216161.052983914 diff --git a/docs/guardian/demo-guide/carbon-offsets/ams-i.e-switch-from-non-renewable-biomass-for-thermal-applications-by-the-user.md b/docs/guardian/demo-guide/carbon-offsets/ams-i.e-switch-from-non-renewable-biomass-for-thermal-applications-by-the-user.md index 5b55d78c7..fc3218c34 100644 --- a/docs/guardian/demo-guide/carbon-offsets/ams-i.e-switch-from-non-renewable-biomass-for-thermal-applications-by-the-user.md +++ b/docs/guardian/demo-guide/carbon-offsets/ams-i.e-switch-from-non-renewable-biomass-for-thermal-applications-by-the-user.md @@ -72,7 +72,7 @@ Various methodologies are used to quantify emissions reductions in cookstove pro ### Policy Import -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ### Available Roles diff --git a/docs/guardian/demo-guide/carbon-offsets/cdm-acm0006-electricity-and-heat-generation-from-biomass.md b/docs/guardian/demo-guide/carbon-offsets/cdm-acm0006-electricity-and-heat-generation-from-biomass.md index 34c277947..36be4cc87 100644 --- a/docs/guardian/demo-guide/carbon-offsets/cdm-acm0006-electricity-and-heat-generation-from-biomass.md +++ b/docs/guardian/demo-guide/carbon-offsets/cdm-acm0006-electricity-and-heat-generation-from-biomass.md @@ -36,7 +36,7 @@ In the modern landscape of emission reduction initiatives, the value of transpar ### Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. 
### Available Roles diff --git a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-i.c.-thermal-energy-production-with-or-without-electricity.md b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-i.c.-thermal-energy-production-with-or-without-electricity.md index e5bca935b..73f4cc4b1 100644 --- a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-i.c.-thermal-energy-production-with-or-without-electricity.md +++ b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-i.c.-thermal-energy-production-with-or-without-electricity.md @@ -44,7 +44,7 @@ This policy is a digitized version of the CDM’s methodology AMS IC: Thermal e ### Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ### Available Roles diff --git a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-ii.g.md b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-ii.g.md index 8533dd653..0be0d9b88 100644 --- a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-ii.g.md +++ b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-ii.g.md @@ -54,7 +54,7 @@ AMS-II.G provides a standardized and measurable framework for clean cooking proj ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. 
### **Available Roles** diff --git a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-iii.h.-methane-recovery-in-wastewater-treatment.md b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-iii.h.-methane-recovery-in-wastewater-treatment.md index 72bf0ebab..c990b1df5 100644 --- a/docs/guardian/demo-guide/carbon-offsets/cdm-ams-iii.h.-methane-recovery-in-wastewater-treatment.md +++ b/docs/guardian/demo-guide/carbon-offsets/cdm-ams-iii.h.-methane-recovery-in-wastewater-treatment.md @@ -36,7 +36,7 @@ In the modern landscape of emission reduction initiatives, transparency and cred ## Policy Guide -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ## Available Roles diff --git a/docs/guardian/demo-guide/carbon-offsets/goldstandard-metered-energy-cooking.md b/docs/guardian/demo-guide/carbon-offsets/goldstandard-metered-energy-cooking.md index 14a239292..0804ee80c 100644 --- a/docs/guardian/demo-guide/carbon-offsets/goldstandard-metered-energy-cooking.md +++ b/docs/guardian/demo-guide/carbon-offsets/goldstandard-metered-energy-cooking.md @@ -59,7 +59,7 @@ This approach is more precise than traditional methodologies, which rely on more ### Policy Guide -This policy is published to Hedera network and can either be imported via Github(.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github(.policy file) or IPFS timestamp. 
Latest Version - 0.0.3 Hedera Topic - [0.0.3972127](https://explore.lworks.io/testnet/topics/0.0.3972127) diff --git a/docs/guardian/demo-guide/carbon-offsets/verra-vmr0006-energy-efficiency-and-fuel-switch-measures-in-thermal-applications.md b/docs/guardian/demo-guide/carbon-offsets/verra-vmr0006-energy-efficiency-and-fuel-switch-measures-in-thermal-applications.md index bfa40cf31..ed704ef5c 100644 --- a/docs/guardian/demo-guide/carbon-offsets/verra-vmr0006-energy-efficiency-and-fuel-switch-measures-in-thermal-applications.md +++ b/docs/guardian/demo-guide/carbon-offsets/verra-vmr0006-energy-efficiency-and-fuel-switch-measures-in-thermal-applications.md @@ -62,7 +62,7 @@ Various methodologies are used to quantify emissions reductions in cookstove pro ### Policy Import -This policy is published to Hedera network and can either be imported via Github (.policy file) or IPSF timestamp. +This policy is published to Hedera network and can either be imported via Github (.policy file) or IPFS timestamp. ### Available Roles diff --git a/docs/guardian/readme/getting-started/installation/upgrading.md b/docs/guardian/readme/getting-started/installation/upgrading.md index c7ddec33b..97e2b41ad 100644 --- a/docs/guardian/readme/getting-started/installation/upgrading.md +++ b/docs/guardian/readme/getting-started/installation/upgrading.md @@ -1,4 +1,4 @@ -# ⬆ Upgrading +# ⬆️ Upgrading ## Introduction @@ -38,8 +38,6 @@ The migration process guides the team to produce artifacts that will help to cor In this case the migration that we account for is an homogeneous migration: a migration from source databases to target databases where the source and target databases are of the same database management system. During upgrading the system, the schemas for the source and target databases are almost identical except for changes in some of the fields, collections and documents. For changing data the source databases must be transformed during migration. -
- #### 1) Data Migration Profiling: Without a good understanding of the Data model the organization could run into a critical flaw that halts the system and brings Guardian to stop for data corruption and inconsistency. This phase would have “Data Migration Model” as output. This document outlines all the data that needs to be migrated, the complete mapping between the Data Source and Data Destination and every transformation in terms of: @@ -257,7 +255,7 @@ Service1 2.1.3 is compatible with only with version 3.2.x of service3 and just b Service 2.1.3 is backward compatible with with all versions of service6 until 4.x.x -Service 3.2.3 ……\\ +Service 3.2.3 …… |


| **service1** | **service2** | **service3** | **service4** | **service5** | **service6** | | -------------------------------------------------------------- | ------------ | ------------------------ | ----------------------- | ------------ | ------------ | ------------ | @@ -350,8 +348,6 @@ Guardian migration consists of a small script that runs as the first step of eve Guardian already deals with this problem: Due to the long-term nature of some sustainability projects, Policy Engine (PE) maintains unlimited ‘read’ backward compatibility with 'old’ schema definition language elements. In other words, new PE versions will recognize and be able to process all existing valid policies with schemas defined starting from the beginning of Guardian existence. ([https://docs.hedera.com/guardian/guardian/standard-registry/schemas/schema-versioning-and-deprecation-policy](https://docs.hedera.com/guardian/guardian/standard-registry/schemas/schema-versioning-and-deprecation-policy)) -
- **Guardian dials with Schema breaking changes** * Removing or renaming an element; @@ -402,8 +398,6 @@ All microservices should make it clear what version of a different microservice A good way of versioning is through semantic versioning, that is, keeping versions as a set of numbers that make it clear when a breaking change happens (for instance, one number can mean that the API has been modified). -
- **Version Technique** * URI versioning: In this approach, developers add version information directly to a service's [URI](https://www.techtarget.com/whatis/definition/URI-Uniform-Resource-Identifier), which provides a quick way to identify a specific version of the service by simply glancing at either the [URL or URN](https://www.cbtnuggets.com/blog/technology/networking/networking-basics-whats-the-difference-between-uri-url-and-urn). Here's an example of how that looks: @@ -416,7 +410,7 @@ A good way of versioning is through semantic versioning, that is, keeping versio Application processes can be shut down on purpose or through an unexpected event. An application process should be completely disposable without any unwanted side-effects. Moreover, processes should start quickly. An important part of managing dependencies has to do with what happens when a service is updated to fit new requirements or solve a design issue. **Other microservices may depend on the semantics of the old version or worse: depend on the way data is modeled in the database.**\ -As microservices are developed in isolation, this means a team usually cannot wait for another team to make the necessary changes to a dependent service before going live. The way to solve this is through versioning. **All microservices should make it clear what version of a different microservice they require and what version they are.**\\ +As microservices are developed in isolation, this means a team usually cannot wait for another team to make the necessary changes to a dependent service before going live. The way to solve this is through versioning. **All microservices should make it clear what version of a different microservice they require and what version they are.** #### 6) Microservice apps are expected to run in an execution environment as stateless processes. 
@@ -443,10 +437,7 @@ Although cloud targets infrastructures, **Azure** and **AWS**, namely, offer the * Azure App Services, optimized for web services **enables the deployment**: * From source code (gain cloud dependency); * From docker image; - * From the docker-compose.yml file (the docker containers are inside a single AppService,single POD, rather than multiple AppServices as one might expect.) - -and - + * From the docker-compose.yml file (the docker containers are inside a single AppService,single POD, rather than multiple AppServices as one might expect.) and * Amazon elastic container registry. At the same time, they offer services that grant direct access to Kubernetes: Azure has its **Azure Kubernetes Service (AKS)** while AWS has Amazon **EKS (and obviously on EC2)**. @@ -489,9 +480,7 @@ For the Guardian Upgrade process the Green Instance will be the copy on which al ### Review the release notes and documentation -Thoroughly review the release notes and documentation provided for the target version. These resources will help you understand the changes, new features, and any potential breaking changes in the upgraded version.\\ - -
+Thoroughly review the release notes and documentation provided for the target version. These resources will help you understand the changes, new features, and any potential breaking changes in the upgraded version. You can find the installation guide and release notes for the target version in the Hedera Guardian [documentation](https://docs.hedera.com/guardian/guardian/readme/getting-started) and in the Guardian [official repository](https://github.com/hashgraph/guardian/releases). @@ -499,7 +488,7 @@ You can find the installation guide and release notes for the target version in It is essential to create a complete backup of the existing Hedera Guardian application and its associated databases before proceeding with the upgrade. This ensures that the application data is safeguarded and can be restored if needed. -Refer to this document, [Backup tool](https://docs.google.com/document/d/1PG7dKgKHigNBS-Bs5lHIdgwvObKzAWwkTLk1XY\_9M0s/edit), for more details. +Refer to this document, [Backup tool](https://docs.google.com/document/d/1PG7dKgKHigNBS-Bs5lHIdgwvObKzAWwkTLk1XY_9M0s/edit), for more details. While backing up consider that until release 2.13.0 environment was described by **.env.docker** @@ -515,7 +504,7 @@ At folder: ./guardian/configs At folder: ./guardian/\/configs/ -Make sure to back up all these files. As for an example, starting from the implementation provided at [Backup tool](https://docs.google.com/document/d/1PG7dKgKHigNBS-Bs5lHIdgwvObKzAWwkTLk1XY\_9M0s/edit): +Make sure to back up all these files. As for an example, starting from the implementation provided at [Backup tool](https://docs.google.com/document/d/1PG7dKgKHigNBS-Bs5lHIdgwvObKzAWwkTLk1XY_9M0s/edit): 1. configure /usr/local/bin to contain the whole guardian tree folders. 2. 
change line 6 of script configs-backup.sh from: @@ -599,7 +588,7 @@ If the prior version of the Hedera Guardian application has been customized by y #### Identify performance behavior -Collect metrics from the current Guardian running instance to analyze performance, logs, and metrics to identify current instance behavior as of monitoring[ tools](https://docs.hedera.com/guardian/monitoring-tools) available for Guardian since release 2.12.1. +Collect metrics from the current Guardian running instance to analyze performance, logs, and metrics to identify current instance behavior as of [monitoring tools](../../../../guardian-in-production/monitoring-tools.md) available for Guardian since release 2.12.1. ### Tasks Checklist during the upgrade @@ -711,7 +700,7 @@ Once testing is successfully completed: #### Monitor and Rollback if Needed -Continuously monitor the green environment's performance, logs, and metrics to identify any issues or anomalies. Compare the result of previous metrics to the new revealed metrics as per the [monitoring tools](https://docs.hedera.com/guardian/monitoring-tools) available for Guardian since release 2.12.1. +Continuously monitor the green environment's performance, logs, and metrics to identify any issues or anomalies. Compare the result of previous metrics to the new revealed metrics as per the [monitoring tools](../../../../guardian-in-production/monitoring-tools.md) available for Guardian since release 2.12.1. If any critical issues arise, you can quickly rollback by switching the load balancer to route all traffic back to the blue environment. diff --git a/docs/guardian/readme/roadmap.md b/docs/guardian/readme/roadmap.md index 0bfd9bb99..f467b5a2a 100644 --- a/docs/guardian/readme/roadmap.md +++ b/docs/guardian/readme/roadmap.md @@ -1,6 +1,6 @@ # 🛣️ Roadmap -
FeatureRelease monthDevelop branch?Released?Release Version
Development of AMS-I.E and Mass Comparison on CookstoveJuly 2024YesYes2.27
Indexer APIJuly 2024YesYes2.27
Development of VMR0006July 2024YesYes2.27
Filtering data for blocks is stateful API, introduce stateless data filters for API usage.July 2024YesYes2.27
Auto-testing community submitted policiesJuly 2024YesYes2.27
Code audit: support and resolution of issuesOctober 2024YesYes3.0
GHG Scorecards ResearchOctober 2024YesYes3.0
Token action block to work with token templatesOctober 2024YesYes3.0
Different token IDs for different projects by the same policyOctober 2024YesYes3.0
Enhance MongoDB IntegrationOctober 2024YesYes3.0
Leverage the pre-built images as the default way to start Guardian locallOctober 2024YesYes3.0
Global Carbon Council (GCC) GCCM001October 2024YesYes3.0
Default values for schema-defined fieldsOctober 2024YesYes3.0
Rationalize API and UI return error codesOctober 2024YesYes3.0
Simplify default SR schema to take out optional propertiesOctober 2024YesYes3.0
Guardian analytics: bottom-up data traceabilityOctober 2024YesYes3.0
API versioning and support/deprecation scheduleOctober 2024YesYes3.0
Data Parameterization and Conditional Review LogicOctober 2024YesYes3.0
Calculation logic for values in 'automatic fields' in schemasOctober 2024YesYes3.0
Verify and Fix the features that got affected by Mirror node changesOctober 2024YesYes3.0
Climate Action Reserve's U.S. Landfill ProtocolJanuary 2025No
Scope 3/PCF Referencing Demo (Methodology Breakdown)October 2024No
Architecture for REDD+ Transactions (ART) The REDD+ WebinarJanuary 2025No
Development of AMS-I.CJanuary 2025No
API facilities to retrieve unique references (IDs) of results for API-triggered operationsJanuary 2025No
Guardian analytics: labels and top down data way pointsJanuary 2025No
Trustchain support for contract-based issuance and retirement implementationJanuary 2025No
Reviewing and Verifying Atma policyJanuary 2025No
American Carbon Registry (ACR) Methodology WebinarJanuary 2025No
Emissions Reduction/Removals (ERRs) Calculation Pre-Calculator in GuardianJanuary 2025No
Formula Driven Definitions & Schema Tree EnhancementJanuary 2025No
Dry-run policy execution 'savepoints' - restart policy dry-run from the list of 'saved' placesJanuary 2025No
Standardize UI on Angular Material, remove/replace PrimeNGJanuary 2025No
Enhancing Research on Indexer and Analytics Use CasesJanuary 2025No
CLEAR Methodology WebinarJanuary 2025No
+
FeatureRelease monthDevelop branch?Released?Release Version
Development of AMS-I.E and Mass Comparison on CookstoveJuly 2024YesYes2.27
Indexer APIJuly 2024YesYes2.27
Development of VMR0006July 2024YesYes2.27
Filtering data for blocks is stateful API, introduce stateless data filters for API usage.July 2024YesYes2.27
Auto-testing community submitted policiesJuly 2024YesYes2.27
Code audit: support and resolution of issuesOctober 2024YesYes3.0
GHG Scorecards ResearchOctober 2024YesYes3.0
Token action block to work with token templatesOctober 2024YesYes3.0
Different token IDs for different projects by the same policyOctober 2024YesYes3.0
Enhance MongoDB IntegrationOctober 2024YesYes3.0
Leverage the pre-built images as the default way to start Guardian locallOctober 2024YesYes3.0
Global Carbon Council (GCC) GCCM001October 2024YesYes3.0
Default values for schema-defined fieldsOctober 2024YesYes3.0
Rationalize API and UI return error codesOctober 2024YesYes3.0
Simplify default SR schema to take out optional propertiesOctober 2024YesYes3.0
Guardian analytics: bottom-up data traceabilityOctober 2024YesYes3.0
API versioning and support/deprecation scheduleOctober 2024YesYes3.0
Data Parameterization and Conditional Review LogicOctober 2024YesYes3.0
Calculation logic for values in 'automatic fields' in schemasOctober 2024YesYes3.0
Verify and Fix the features that got affected by Mirror node changesOctober 2024YesYes3.0
Climate Action Reserve's U.S. Landfill ProtocolJanuary 2025No
Scope 3/PCF Referencing Demo (Methodology Breakdown)January 2025No
Architecture for REDD+ Transactions (ART) The REDD+ WebinarJanuary 2025No
Development of AMS-I.CJanuary 2025No
API facilities to retrieve unique references (IDs) of results for API-triggered operationsJanuary 2025No
Guardian analytics: labels and top down data way pointsJanuary 2025No
Trustchain support for contract-based issuance and retirement implementationJanuary 2025No
Reviewing and Verifying Atma policyJanuary 2025No
American Carbon Registry (ACR) Methodology WebinarJanuary 2025No
Emissions Reduction/Removals (ERRs) Calculation Pre-Calculator in GuardianJanuary 2025No
Formula Driven Definitions & Schema Tree EnhancementJanuary 2025No
Dry-run policy execution 'savepoints' - restart policy dry-run from the list of 'saved' placesJanuary 2025No
Standardize UI on Angular Material, remove/replace PrimeNGJanuary 2025No
Enhancing Research on Indexer and Analytics Use CasesJanuary 2025No
CLEAR Methodology WebinarJanuary 2025No
{% tabs %} {% tab title="Upcoming Releases" %} diff --git a/frontend/src/app/app-routing.module.ts b/frontend/src/app/app-routing.module.ts index 08096d84c..97490d93f 100644 --- a/frontend/src/app/app-routing.module.ts +++ b/frontend/src/app/app-routing.module.ts @@ -298,7 +298,8 @@ const routes: Routes = [ roles: [UserRole.STANDARD_REGISTRY, UserRole.USER], permissions: [ Permissions.POLICIES_POLICY_READ, - Permissions.POLICIES_POLICY_EXECUTE + Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, ] } }, @@ -309,7 +310,8 @@ const routes: Routes = [ data: { roles: [UserRole.STANDARD_REGISTRY, UserRole.USER], permissions: [ - Permissions.POLICIES_POLICY_EXECUTE + Permissions.POLICIES_POLICY_EXECUTE, + Permissions.POLICIES_POLICY_MANAGE, ] } }, diff --git a/frontend/src/app/app.component.scss b/frontend/src/app/app.component.scss index 1e3cfd0e8..ef16bf5e6 100644 --- a/frontend/src/app/app.component.scss +++ b/frontend/src/app/app.component.scss @@ -357,6 +357,12 @@ body { max-width: 310px; } + .cell-210 { + min-width: 210px; + width: 210px; + max-width: 210px; + } + .cell-190 { min-width: 190px; width: 190px; diff --git a/frontend/src/app/modules/analytics/compare-document/compare-document.component.ts b/frontend/src/app/modules/analytics/compare-document/compare-document.component.ts index 488224247..dd29005d7 100644 --- a/frontend/src/app/modules/analytics/compare-document/compare-document.component.ts +++ b/frontend/src/app/modules/analytics/compare-document/compare-document.component.ts @@ -137,9 +137,10 @@ export class CompareDocumentComponent implements OnInit { if (!this.customColumnSize) { const k = (this.customColumnSize) ? 
this.customColumnSize : Math.round(100 / this.size); - this._gridStyle = `max(calc(${k}vw - 80px), 680px)`; + const m = Math.round(280 / this.size); + this._gridStyle = `max(calc(${k}vw - 120px - ${m}px), 695px)`; for (let i = 1; i < this.size; i++) { - this._gridStyle += ` 35px max(calc(${k}vw - 45px), 720px)`; + this._gridStyle += ` 35px max(calc(${k}vw - 90px - ${m}px), 725px)`; } } diff --git a/frontend/src/app/modules/common/models/field-rule-validator.ts b/frontend/src/app/modules/common/models/field-rule-validator.ts index 2adb34293..16a520db6 100644 --- a/frontend/src/app/modules/common/models/field-rule-validator.ts +++ b/frontend/src/app/modules/common/models/field-rule-validator.ts @@ -331,8 +331,8 @@ export class SchemaRuleValidators { public schemas: Set; public validators: SchemaRuleValidator[]; - constructor(data: any[]) { - this.validators = data.map((v) => new SchemaRuleValidator(v)); + constructor(data: any[] | null) { + this.validators = (data || []).map((v) => new SchemaRuleValidator(v)); this.schemas = new Set(); for (const validator of this.validators) { for (const iri of validator.schemas) { diff --git a/frontend/src/app/modules/policy-engine/policies/policies.component.html b/frontend/src/app/modules/policy-engine/policies/policies.component.html index 1cfd46d7f..9564cc776 100644 --- a/frontend/src/app/modules/policy-engine/policies/policies.component.html +++ b/frontend/src/app/modules/policy-engine/policies/policies.component.html @@ -133,8 +133,7 @@ pFrozenColumn>Operations Description + class="header-cell-text cell-description">Description Status @@ -160,8 +159,8 @@ *ngIf="canDisplayColumn('schemas')" class="header-cell-text cell-170">Schemas Multi Policy + *ngIf="canDisplayColumn('multi-instance')" + class="header-cell-text cell-210">Multi Policy @@ -180,14 +179,14 @@ [class.cell-last]="!canDisplayColumn('operations')" pFrozenColumn > -
+
-
- double_arrow - Go -
+ +
+ double_arrow + Go +
-
+
+
diff --git a/frontend/src/app/modules/policy-engine/policy-viewer/blocks/request-document-block/request-document-block.component.html b/frontend/src/app/modules/policy-engine/policy-viewer/blocks/request-document-block/request-document-block.component.html index 79d193c3c..f7994046d 100644 --- a/frontend/src/app/modules/policy-engine/policy-viewer/blocks/request-document-block/request-document-block.component.html +++ b/frontend/src/app/modules/policy-engine/policy-viewer/blocks/request-document-block/request-document-block.component.html @@ -25,7 +25,8 @@

{{ title }}

There is some data to restore. You can restore latest values:
+ - -
{{ item.unit }}
-
-
-
-
-
-
-
@@ -122,9 +124,9 @@
@@ -138,19 +140,19 @@
{{item.unit}}
-
- -
@@ -172,22 +174,24 @@
-
Suggestion: {{item.suggest}}
-
+
-
{{ item.unit }}
-
-
-
-
-
-
- @@ -271,7 +275,7 @@ class="guardian-dropdown" [formControl]="listItem.control" [options]="item.enumValues" - [showClear]="false" + [showClear]="false" appendTo="body" placeholder="Not selected" panelStyleClass="guardian-dropdown-panel"> @@ -285,12 +289,12 @@
{{item.unit}}
- +
-
- @@ -301,9 +305,9 @@
{{getInvalidMessageByFieldType(item)}}
-
+
@@ -354,28 +360,28 @@
- - - @@ -384,7 +390,7 @@
-
+
@@ -426,7 +434,8 @@ Please make sure all fields in schemas contain a valid value
-
@@ -440,9 +449,9 @@
- @@ -454,28 +463,28 @@
- - - @@ -489,9 +498,9 @@
- diff --git a/frontend/src/app/modules/schema-engine/schema-form/schema-form.component.ts b/frontend/src/app/modules/schema-engine/schema-form/schema-form.component.ts index 924662d05..292a28b95 100644 --- a/frontend/src/app/modules/schema-engine/schema-form/schema-form.component.ts +++ b/frontend/src/app/modules/schema-engine/schema-form/schema-form.component.ts @@ -51,6 +51,7 @@ enum ErrorArrayMessageByFieldType { class IButton { id: string; visible: () => boolean; + disabled: () => boolean; text: string; class: string; type: string; @@ -144,6 +145,9 @@ export class SchemaFormComponent implements OnInit { } return this.currentIndex === 0 && !this.cancelHidden; }, + disabled: () => { + return false; + }, text: this.cancelText, class: 'p-button-outlined', type: 'secondary', @@ -159,6 +163,9 @@ export class SchemaFormComponent implements OnInit { } return this.currentIndex !== 0; }, + disabled: () => { + return false; + }, text: 'Previous', class: 'p-button-outlined', type: 'secondary', @@ -174,6 +181,9 @@ export class SchemaFormComponent implements OnInit { } return !this.isShown[this.fields.length - 1]; }, + disabled: () => { + return false; + }, text: 'Next', class: 'p-button', type: 'primary', @@ -189,6 +199,9 @@ export class SchemaFormComponent implements OnInit { } return !!this.isShown[this.fields.length - 1] && !this.submitHidden; }, + disabled: () => { + return false; + }, text: this.submitText, class: 'p-button', type: 'primary', @@ -652,6 +665,22 @@ export class SchemaFormComponent implements OnInit { return item.type === 'null'; } + suggestIsObject(item: any): boolean { + return typeof item === 'object'; + } + + public parseSuggest(item: any): string { + return this.findString(item); + } + + private findString(item: any): string { + if (typeof item === 'object') { + return this.findString(Object.values(item)[0]); + } else { + return item as string; + } + } + public isTime(item: SchemaField): boolean { return item.type === 'string' && item.format === 
'time'; } diff --git a/frontend/src/app/modules/schema-engine/vc-dialog/vc-dialog.component.html b/frontend/src/app/modules/schema-engine/vc-dialog/vc-dialog.component.html index 5291f5d01..a545ac396 100644 --- a/frontend/src/app/modules/schema-engine/vc-dialog/vc-dialog.component.html +++ b/frontend/src/app/modules/schema-engine/vc-dialog/vc-dialog.component.html @@ -8,7 +8,10 @@
-
+
+
+
+
{ - if (data.schema) { - const document = new Schema(data.schema).document; + this.loading = true; + const requests = [this.profileService.getProfile()]; - this.json = document ? JSON.stringify((document), null, 4) : '' - this.document = document - } - }); + if (id && topicId && category) { + requests.push(this.schemaService.getSchemaWithSubSchemas(category, id, topicId)); } + + forkJoin(requests).subscribe(([profile, data]: any[]) => { + this.user = new UserPermissions(profile); + if (data && data.schema) { + const document = new Schema(data.schema).document; + this.json = document ? JSON.stringify((document), null, 4) : '' + this.document = document + } + this.loading = false; + }, (error) => { + this.loading = false; + console.error(error); + }); } } diff --git a/frontend/src/app/utils/permissions-interface.ts b/frontend/src/app/utils/permissions-interface.ts index 7ef2295b3..1eaf07544 100644 --- a/frontend/src/app/utils/permissions-interface.ts +++ b/frontend/src/app/utils/permissions-interface.ts @@ -121,7 +121,8 @@ export const entityNames = new Map([ [PermissionEntities.THEME, 'Theme'], [PermissionEntities.TOKEN, 'Token'], [PermissionEntities.TRUST_CHAIN, 'Trust Chain'], - [PermissionEntities.ROLE, 'Role'] + [PermissionEntities.ROLE, 'Role'], + [PermissionEntities.STATISTIC, 'Statistic'] ]) export const actionIndexes = new Map([ diff --git a/frontend/src/app/views/new-header/new-header.component.html b/frontend/src/app/views/new-header/new-header.component.html index 9a7f239ca..6336689c3 100644 --- a/frontend/src/app/views/new-header/new-header.component.html +++ b/frontend/src/app/views/new-header/new-header.component.html @@ -57,17 +57,6 @@
-
- - - Balance - {{ balance }} - -
-
-
-
+
+ + Balance {{ balance }} +
+
+
+
+
span { + display: flex; + justify-content: space-between; + width: 100%; + } + + &.collapsed-item { + div, + span { + display: none; + } + } +} + + .boundary-box { height: 78px; display: flex; diff --git a/frontend/src/app/views/schemas/schemas.component.html b/frontend/src/app/views/schemas/schemas.component.html index b4a8a1e9b..38fc22ea5 100644 --- a/frontend/src/app/views/schemas/schemas.component.html +++ b/frontend/src/app/views/schemas/schemas.component.html @@ -230,7 +230,7 @@
-
+
-
+
-
+
-
+
{ const map: { [key: string]: IRateMap> } = {}; if (document1) { for (const item of document1.getFieldsList()) { - map[item.key] = { left: item, right: null }; - list.push(item.key); + if (map[item.key]) { + map[item.key] = { left: item, right: null }; + } else { + map[item.key] = { left: item, right: null }; + list.push(item.key); + } } } if (document2) { diff --git a/guardian-service/src/api/analytics.service.ts b/guardian-service/src/api/analytics.service.ts index 2a04244b8..dfe21feb7 100644 --- a/guardian-service/src/api/analytics.service.ts +++ b/guardian-service/src/api/analytics.service.ts @@ -30,7 +30,9 @@ import { VpDocument as VpDocumentCollection, VcDocument as VcDocumentCollection, Workers, - PinoLogger + PinoLogger, + VpDocument, + getVCField } from '@guardian/common'; import { ApiResponse } from '../api/helpers/api-response.js'; import { IOwner, MessageAPI, PolicyType, UserRole, WorkerTaskType } from '@guardian/interfaces'; @@ -56,6 +58,13 @@ interface ISearchResult { tags: string[] } +function getAmount(vp: VpDocument): number { + const vcs = vp.document.verifiableCredential || []; + const mintIndex = Math.max(1, vcs.length - 1); + const mint = vcs[mintIndex]; + return Number(getVCField(mint, 'amount')) || 0; +} + async function localSearch( user: IOwner, type: string, @@ -82,7 +91,14 @@ async function localSearch( }); } else { filter.$and.push({ - owner: user.creator, + $or: [ + { + owner: user.creator, + }, + { + creator: user.creator, + }, + ], hash: { $exists: true, $ne: null } }); } @@ -119,7 +135,8 @@ async function localSearch( policies = policies.filter((policy) => policy.vpCount >= options.minVpCount); } for (const policy of policies) { - policy.tokensCount = 0; + const vps = await dataBaseServer.find(VpDocumentCollection, { policyId: policy.id }); + policy.tokensCount = vps.map((vp) => getAmount(vp)).reduce((sum, amount) => sum + amount, 0); } if (options.minTokensCount) { policies = policies.filter((policy) => policy.tokensCount >= 
options.minTokensCount); diff --git a/guardian-service/src/api/contract.service.ts b/guardian-service/src/api/contract.service.ts index 0968d31df..2ac2bde1e 100644 --- a/guardian-service/src/api/contract.service.ts +++ b/guardian-service/src/api/contract.service.ts @@ -1328,7 +1328,7 @@ export async function contractAPI( type: TopicType.ContractTopic, name: TopicType.ContractTopic, description: TopicType.ContractTopic, - owner: owner.creator, + owner: owner.owner, policyId: null, policyUUID: null, }, @@ -1355,7 +1355,7 @@ export async function contractAPI( ); const contract = await dataBaseServer.save(Contract, { contractId, - owner: owner.creator, + owner: owner.owner, description, permissions: type === ContractType.WIPE ? (version !== '1.0.0' ? 7 : 15) @@ -1380,7 +1380,7 @@ export async function contractAPI( .setTopicObject(topic) .sendMessage(contractMessage); const userTopic = await TopicConfig.fromObject( - await DatabaseServer.getTopicByType(owner.creator, TopicType.UserTopic), + await DatabaseServer.getTopicByType(owner.owner, TopicType.UserTopic), true ); await topicHelper.twoWayLink(topic, userTopic, contractMessageResult.getId()); @@ -1436,7 +1436,7 @@ export async function contractAPI( Contract, { contractId, - owner: owner.creator, + owner: owner.owner, description, permissions, topicId: memo, @@ -1454,7 +1454,7 @@ export async function contractAPI( }, { contractId, - owner: owner.creator, + owner: owner.owner, } ) as Contract & { contractId: string; version: string; }; if ( diff --git a/guardian-service/src/api/helpers/tool-import-export-helper.ts b/guardian-service/src/api/helpers/tool-import-export-helper.ts index b88316c23..254192d71 100644 --- a/guardian-service/src/api/helpers/tool-import-export-helper.ts +++ b/guardian-service/src/api/helpers/tool-import-export-helper.ts @@ -384,14 +384,14 @@ export async function importToolByFile( notifier.completedAndStart('Create topic'); const parent = await TopicConfig.fromObject( - await 
DatabaseServer.getTopicByType(user.creator, TopicType.UserTopic), true + await DatabaseServer.getTopicByType(user.owner, TopicType.UserTopic), true ); const topicHelper = new TopicHelper(root.hederaAccountId, root.hederaAccountKey, root.signOptions); const topic = await topicHelper.create({ type: TopicType.ToolTopic, name: tool.name || TopicType.ToolTopic, description: tool.description || TopicType.ToolTopic, - owner: user.creator, + owner: user.owner, targetId: null, targetUUID: null }, { admin: true, submit: true }); diff --git a/guardian-service/src/api/module.service.ts b/guardian-service/src/api/module.service.ts index 5b710ec50..bfb4a2a06 100644 --- a/guardian-service/src/api/module.service.ts +++ b/guardian-service/src/api/module.service.ts @@ -127,11 +127,11 @@ export async function publishModule( logger.info('Publish module', ['GUARDIAN_SERVICE']); notifier.start('Resolve Hedera account'); const users = new Users(); - const root = await users.getHederaAccount(user.creator); + const root = await users.getHederaAccount(user.owner); notifier.completedAndStart('Find topic'); const userTopic = await TopicConfig.fromObject( - await DatabaseServer.getTopicByType(user.creator, TopicType.UserTopic), + await DatabaseServer.getTopicByType(user.owner, TopicType.UserTopic), true ); const messageServer = new MessageServer(root.hederaAccountId, root.hederaAccountKey, root.signOptions) @@ -143,7 +143,7 @@ export async function publishModule( type: TopicType.ModuleTopic, name: model.name || TopicType.ModuleTopic, description: TopicType.ModuleTopic, - owner: user.creator, + owner: user.owner, policyId: null, policyUUID: null }); diff --git a/guardian-service/src/api/policy-statistics.service.ts b/guardian-service/src/api/policy-statistics.service.ts index 412c87a78..bad5832cf 100644 --- a/guardian-service/src/api/policy-statistics.service.ts +++ b/guardian-service/src/api/policy-statistics.service.ts @@ -1,6 +1,6 @@ import { ApiResponse } from './helpers/api-response.js'; 
-import { BinaryMessageResponse, DatabaseServer, MessageAction, MessageError, MessageResponse, MessageServer, PinoLogger, PolicyImportExport, PolicyStatistic, PolicyStatisticImportExport, StatisticAssessmentMessage, StatisticMessage, Users } from '@guardian/common'; -import { EntityStatus, IOwner, MessageAPI, PolicyType, Schema, SchemaEntity, SchemaStatus } from '@guardian/interfaces'; +import { BinaryMessageResponse, DatabaseServer, MessageAction, MessageError, MessageResponse, MessageServer, PinoLogger, PolicyStatistic, PolicyStatisticImportExport, StatisticAssessmentMessage, StatisticMessage, Users } from '@guardian/common'; +import { EntityStatus, IOwner, MessageAPI, PolicyType, Schema, SchemaEntity } from '@guardian/interfaces'; import { publishSchema } from './helpers/index.js'; import { findRelationships, generateSchema, generateVcDocument, getOrCreateTopic, publishConfig, uniqueDocuments } from './helpers/policy-statistics-helpers.js'; @@ -164,28 +164,13 @@ export async function statisticsAPI(logger: PinoLogger): Promise { return new MessageError('Item does not exist.'); } - const { schemas, toolSchemas } = await PolicyImportExport.loadAllSchemas(policy); - const systemSchemas = await DatabaseServer.getSchemas({ - topicId: policy.topicId, - entity: { $in: [SchemaEntity.MINT_TOKEN, SchemaEntity.MINT_NFTOKEN] } - }); - - const all = [] - .concat(schemas, toolSchemas, systemSchemas) - .filter((s) => s.status === SchemaStatus.PUBLISHED && s.entity !== 'EVC'); + const schemas = await PolicyStatisticImportExport.getPolicySchemas(policy); if (item.status === EntityStatus.PUBLISHED) { const schema = await DatabaseServer.getSchema({ topicId: item.topicId }); - return new MessageResponse({ - policy, - schemas: all, - schema - }); + return new MessageResponse({ policy, schemas, schema }); } else { - return new MessageResponse({ - policy, - schemas: all - }); + return new MessageResponse({ policy, schemas }); } } catch (error) { await logger.error(error, 
['GUARDIAN_SERVICE']); @@ -678,6 +663,8 @@ export async function statisticsAPI(logger: PinoLogger): Promise { return new MessageError('Item does not exist.'); } + const schemas = await PolicyStatisticImportExport.getPolicySchemas(policy); + const preview = await PolicyStatisticImportExport.parseZipFile(Buffer.from(zip.data)); const { definition } = preview; @@ -692,6 +679,7 @@ export async function statisticsAPI(logger: PinoLogger): Promise { definition.policyTopicId = policy.topicId; definition.policyInstanceTopicId = policy.instanceTopicId; definition.status = EntityStatus.DRAFT; + definition.config = PolicyStatisticImportExport.updateSchemas(schemas, definition.config); definition.config = PolicyStatisticImportExport.validateConfig(definition.config); const row = await DatabaseServer.createStatistic(definition); return new MessageResponse(row); diff --git a/guardian-service/src/api/projects.service.ts b/guardian-service/src/api/projects.service.ts index 9ca9343ab..8627be4fd 100644 --- a/guardian-service/src/api/projects.service.ts +++ b/guardian-service/src/api/projects.service.ts @@ -26,9 +26,10 @@ export async function getProjectSchema(iri: string, schemas: Map): } else { const schema = await new DatabaseServer().getSchemaByIRI(iri); if (schema) { - const fieldCompanyName = (new Schema(schema)).searchFields((f) => f.title === CompanyNameField); - const fieldSectoralScope = (new Schema(schema)).searchFields((f) => f.title === SectoralScopeField); - const fieldTitle = (new Schema(schema)).searchFields((f) => f.title === ProjectTitleField); + const _schema = new Schema(schema); + const fieldCompanyName = _schema.searchFields((f) => f.property === CompanyNameField); + const fieldSectoralScope = _schema.searchFields((f) => f.property === SectoralScopeField); + const fieldTitle = _schema.searchFields((f) => f.property === ProjectTitleField); schemas.set(iri, { fieldCompanyName, fieldSectoralScope, diff --git 
a/guardian-service/src/policy-engine/helpers/policy-import-export-helper.ts b/guardian-service/src/policy-engine/helpers/policy-import-export-helper.ts index 038da5605..4f51561ed 100644 --- a/guardian-service/src/policy-engine/helpers/policy-import-export-helper.ts +++ b/guardian-service/src/policy-engine/helpers/policy-import-export-helper.ts @@ -124,7 +124,7 @@ export class PolicyImport { private async createPolicyTopic(policy: Policy, versionOfTopicId: string, user: IOwner) { this.notifier.completedAndStart('Resolve topic'); this.parentTopic = await TopicConfig.fromObject( - await DatabaseServer.getTopicByType(user.creator, TopicType.UserTopic), true + await DatabaseServer.getTopicByType(user.owner, TopicType.UserTopic), true ); if (this.demo) { @@ -132,7 +132,7 @@ export class PolicyImport { type: TopicType.PolicyTopic, name: policy.name || TopicType.PolicyTopic, description: policy.topicDescription || TopicType.PolicyTopic, - owner: user.creator, + owner: user.owner, policyId: null, policyUUID: null, topicId: `0.0.${Date.now()}${(Math.random()*1000).toFixed(0)}` @@ -156,7 +156,7 @@ export class PolicyImport { type: TopicType.PolicyTopic, name: policy.name || TopicType.PolicyTopic, description: policy.topicDescription || TopicType.PolicyTopic, - owner: user.creator, + owner: user.owner, policyId: null, policyUUID: null }); diff --git a/guardian-service/src/policy-engine/policy-engine.service.ts b/guardian-service/src/policy-engine/policy-engine.service.ts index 729900a8c..eb8b557d0 100644 --- a/guardian-service/src/policy-engine/policy-engine.service.ts +++ b/guardian-service/src/policy-engine/policy-engine.service.ts @@ -1563,7 +1563,7 @@ export class PolicyEngineService { throw new Error(`Policy is not in Dry Run`); } - const topic = await DatabaseServer.getTopicByType(owner.creator, TopicType.UserTopic); + const topic = await DatabaseServer.getTopicByType(owner.owner, TopicType.UserTopic); const newPrivateKey = PrivateKey.generate(); const newAccountId = new 
AccountId(Date.now()); diff --git a/guardian-service/src/policy-engine/policy-engine.ts b/guardian-service/src/policy-engine/policy-engine.ts index 931d93c1c..c781ff9c8 100644 --- a/guardian-service/src/policy-engine/policy-engine.ts +++ b/guardian-service/src/policy-engine/policy-engine.ts @@ -160,18 +160,17 @@ export class PolicyEngine extends NatsService { */ public async accessPolicyCode(policy: Policy, user: IOwner): Promise { if (!policy) { - return 1 + //Policy does not exist + return 1; } if (user.owner !== policy.owner) { - return 2 + //Insufficient permissions + return 2; } if (user.creator === policy.creator) { - return 0 + return 0; } - const published = ( - policy.status === PolicyType.PUBLISH || - policy.status === PolicyType.DISCONTINUED - ); + const published = (policy.status === PolicyType.PUBLISH || policy.status === PolicyType.DISCONTINUED); const assigned = await DatabaseServer.getAssignedEntity(AssignedEntityType.Policy, policy.id, user.creator); switch (user.access) { @@ -191,9 +190,11 @@ export class PolicyEngine extends NatsService { return (published && assigned) ? 
0 : 2; } case AccessType.NONE: { + //Insufficient permissions return 2; } default: { + //Insufficient permissions return 2; } } @@ -369,20 +370,20 @@ export class PolicyEngine extends NatsService { let newTopic: Topic; notifier.completedAndStart('Resolve Hedera account'); - const root = await this.users.getHederaAccount(user.creator); + const root = await this.users.getHederaAccount(user.owner); notifier.completed(); if (!model.topicId) { notifier.start('Create topic'); logger.info('Create Policy: Create New Topic', ['GUARDIAN_SERVICE']); const parent = await TopicConfig.fromObject( - await DatabaseServer.getTopicByType(user.creator, TopicType.UserTopic), true + await DatabaseServer.getTopicByType(user.owner, TopicType.UserTopic), true ); const topicHelper = new TopicHelper(root.hederaAccountId, root.hederaAccountKey, root.signOptions); const topic = await topicHelper.create({ type: TopicType.PolicyTopic, name: model.name || TopicType.PolicyTopic, description: model.topicDescription || TopicType.PolicyTopic, - owner: user.creator, + owner: user.owner, policyId: null, policyUUID: null }); @@ -931,7 +932,7 @@ export class PolicyEngine extends NatsService { const databaseServer = new DatabaseServer(dryRunId); //Create Services - const root = await this.users.getHederaAccount(user.creator); + const root = await this.users.getHederaAccount(user.owner); const topic = await TopicConfig.fromObject( await DatabaseServer.getTopicById(model.topicId), !demo ); @@ -951,7 +952,7 @@ export class PolicyEngine extends NatsService { type: TopicType.InstancePolicyTopic, name: model.name || TopicType.InstancePolicyTopic, description: model.topicDescription || TopicType.InstancePolicyTopic, - owner: user.creator, + owner: user.owner, policyId: dryRunId, policyUUID: model.uuid }); @@ -997,11 +998,11 @@ export class PolicyEngine extends NatsService { credentialSubject = SchemaHelper.updateObjectContext(schemaObject, credentialSubject); } const vcHelper = new VcHelper(); - const 
didDocument = await vcHelper.loadDidDocument(user.creator); + const didDocument = await vcHelper.loadDidDocument(user.owner); const vc = await vcHelper.createVerifiableCredential(credentialSubject, didDocument, null, null); await databaseServer.saveVC({ hash: vc.toCredentialHash(), - owner: user.creator, + owner: user.owner, document: vc.toJsonTree(), type: SchemaEntity.POLICY, policyId: `${model.id}` diff --git a/indexer-service/src/api/entities.service.ts b/indexer-service/src/api/entities.service.ts index 84100a460..45fe194d9 100644 --- a/indexer-service/src/api/entities.service.ts +++ b/indexer-service/src/api/entities.service.ts @@ -533,6 +533,11 @@ export class EntityService { filters, options )) as [Policy[], number]; + for (const row of rows) { + if (row.analytics) { + delete row.analytics.hashMap; + } + } const result = { items: rows, pageIndex: options.offset / options.limit, diff --git a/indexer-service/src/app.ts b/indexer-service/src/app.ts index 17c3aba6c..6f692ec95 100644 --- a/indexer-service/src/app.ts +++ b/indexer-service/src/app.ts @@ -10,7 +10,22 @@ import { EntityService } from './api/entities.service.js'; import { FiltersService } from './api/filters.service.js'; import { LandingService } from './api/landing.service.js'; import { AnalyticsService } from './api/analytics.service.js'; -import { SynchronizationAnalytics, SynchronizationContracts, SynchronizationDid, SynchronizationModules, SynchronizationPolicy, SynchronizationProjects, SynchronizationRegistries, SynchronizationRoles, SynchronizationSchemas, SynchronizationTools, SynchronizationTopics, SynchronizationVCs, SynchronizationVPs, } from './helpers/synchronizers/index.js'; +import { + SynchronizationAnalytics, + SynchronizationContracts, + SynchronizationDid, + SynchronizationModules, + SynchronizationPolicy, + SynchronizationProjects, + SynchronizationRegistries, + SynchronizationRoles, + SynchronizationSchemas, + SynchronizationTools, + SynchronizationTopics, + SynchronizationVCs, + 
SynchronizationVPs, + SynchronizationAll +} from './helpers/synchronizers/index.js'; import { fixtures } from './helpers/fixtures.js'; const channelName = ( @@ -135,46 +150,53 @@ Promise.all([ * Listen */ app.listen(); + /** * Sync tasks */ - (new SynchronizationAnalytics(getMask(process.env.SYNC_ANALYTICS_MASK))) - .start(getBoolean(process.env.START_SYNC_ANALYTICS)); - (new SynchronizationProjects(getMask(process.env.SYNC_ANALYTICS_MASK))) - .start(getBoolean(process.env.START_SYNC_ANALYTICS)); + if (process.env.SYNC_ALL_MASK) { + (new SynchronizationAll(getMask(process.env.SYNC_ALL_MASK))) + .start(getBoolean(process.env.START_SYNC_ALL)); + } else { + (new SynchronizationAnalytics(getMask(process.env.SYNC_ANALYTICS_MASK))) + .start(getBoolean(process.env.START_SYNC_ANALYTICS)); + + (new SynchronizationProjects(getMask(process.env.SYNC_ANALYTICS_MASK))) + .start(getBoolean(process.env.START_SYNC_ANALYTICS)); - (new SynchronizationModules(getMask(process.env.SYNC_MODULES_MASK))) - .start(getBoolean(process.env.START_SYNC_MODULES)); + (new SynchronizationModules(getMask(process.env.SYNC_MODULES_MASK))) + .start(getBoolean(process.env.START_SYNC_MODULES)); - (new SynchronizationRegistries(getMask(process.env.SYNC_REGISTRIES_MASK))) - .start(getBoolean(process.env.START_SYNC_REGISTRIES)); + (new SynchronizationRegistries(getMask(process.env.SYNC_REGISTRIES_MASK))) + .start(getBoolean(process.env.START_SYNC_REGISTRIES)); - (new SynchronizationRoles(getMask(process.env.SYNC_ROLES_MASK))) - .start(getBoolean(process.env.START_SYNC_ROLES)); + (new SynchronizationRoles(getMask(process.env.SYNC_ROLES_MASK))) + .start(getBoolean(process.env.START_SYNC_ROLES)); - (new SynchronizationTools(getMask(process.env.SYNC_TOOLS_MASK))) - .start(getBoolean(process.env.START_SYNC_TOOLS)); + (new SynchronizationTools(getMask(process.env.SYNC_TOOLS_MASK))) + .start(getBoolean(process.env.START_SYNC_TOOLS)); - (new SynchronizationTopics(getMask(process.env.SYNC_TOPICS_MASK))) - 
.start(getBoolean(process.env.START_SYNC_TOPICS)); + (new SynchronizationTopics(getMask(process.env.SYNC_TOPICS_MASK))) + .start(getBoolean(process.env.START_SYNC_TOPICS)); - (new SynchronizationSchemas(getMask(process.env.SYNC_SCHEMAS_MASK))) - .start(getBoolean(process.env.START_SYNC_SCHEMAS)); + (new SynchronizationSchemas(getMask(process.env.SYNC_SCHEMAS_MASK))) + .start(getBoolean(process.env.START_SYNC_SCHEMAS)); - (new SynchronizationDid(getMask(process.env.SYNC_DID_DOCUMENTS_MASK))) - .start(getBoolean(process.env.START_SYNC_DID_DOCUMENTS)); + (new SynchronizationDid(getMask(process.env.SYNC_DID_DOCUMENTS_MASK))) + .start(getBoolean(process.env.START_SYNC_DID_DOCUMENTS)); - (new SynchronizationVCs(getMask(process.env.SYNC_VC_DOCUMENTS_MASK))) - .start(getBoolean(process.env.START_SYNC_VC_DOCUMENTS)); + (new SynchronizationVCs(getMask(process.env.SYNC_VC_DOCUMENTS_MASK))) + .start(getBoolean(process.env.START_SYNC_VC_DOCUMENTS)); - (new SynchronizationVPs(getMask(process.env.SYNC_VP_DOCUMENTS_MASK))) - .start(getBoolean(process.env.START_SYNC_VP_DOCUMENTS)); + (new SynchronizationVPs(getMask(process.env.SYNC_VP_DOCUMENTS_MASK))) + .start(getBoolean(process.env.START_SYNC_VP_DOCUMENTS)); - (new SynchronizationPolicy(getMask(process.env.SYNC_POLICIES_MASK))) - .start(getBoolean(process.env.START_SYNC_POLICIES)); + (new SynchronizationPolicy(getMask(process.env.SYNC_POLICIES_MASK))) + .start(getBoolean(process.env.START_SYNC_POLICIES)); - (new SynchronizationContracts(getMask(process.env.SYNC_CONTRACTS_MASK))) - .start(getBoolean(process.env.START_SYNC_CONTRACTS)); + (new SynchronizationContracts(getMask(process.env.SYNC_CONTRACTS_MASK))) + .start(getBoolean(process.env.START_SYNC_CONTRACTS)); + } }, (reason) => { console.log(reason); diff --git a/indexer-service/src/helpers/synchronization-task.ts b/indexer-service/src/helpers/synchronization-task.ts index 6da59e0b1..d8cab9111 100644 --- a/indexer-service/src/helpers/synchronization-task.ts +++ 
b/indexer-service/src/helpers/synchronization-task.ts @@ -24,6 +24,13 @@ export abstract class SynchronizationTask { private readonly _mask: string ) { } + /** + * Name + */ + public get taskName(): string { + return this._name; + } + /** * Start synchronization task */ @@ -77,5 +84,5 @@ export abstract class SynchronizationTask { this._job?.stop(); } - protected abstract sync(): Promise; + public abstract sync(): Promise; } diff --git a/indexer-service/src/helpers/synchronizers/index.ts b/indexer-service/src/helpers/synchronizers/index.ts index 3ec568141..45c50e6b5 100644 --- a/indexer-service/src/helpers/synchronizers/index.ts +++ b/indexer-service/src/helpers/synchronizers/index.ts @@ -11,4 +11,5 @@ export * from './synchronize-tool.js'; export * from './synchronize-topic.js'; export * from './synchronize-vp.js'; export * from './synchronize-contracts.js'; -export * from './synchronize-projects.js'; \ No newline at end of file +export * from './synchronize-projects.js'; +export * from './synchronize-all.js'; \ No newline at end of file diff --git a/indexer-service/src/helpers/synchronizers/synchronize-all.ts b/indexer-service/src/helpers/synchronizers/synchronize-all.ts new file mode 100644 index 000000000..3c82bfd8b --- /dev/null +++ b/indexer-service/src/helpers/synchronizers/synchronize-all.ts @@ -0,0 +1,82 @@ +import { SynchronizationTask } from '../synchronization-task.js'; +import { SynchronizationAnalytics } from './synchronize-analytics.js'; +import { SynchronizationContracts } from './synchronize-contracts.js'; +import { SynchronizationDid } from './synchronize-dids.js'; +import { SynchronizationModules } from './synchronize-module.js'; +import { SynchronizationPolicy } from './synchronize-policy.js'; +import { SynchronizationProjects } from './synchronize-projects.js'; +import { SynchronizationRegistries } from './synchronize-registry.js'; +import { SynchronizationRoles } from './synchronize-role.js'; +import { SynchronizationSchemas } from 
'./synchronize-schema.js'; +import { SynchronizationTools } from './synchronize-tool.js'; +import { SynchronizationTopics } from './synchronize-topic.js'; +import { SynchronizationVCs } from './synchronize-vcs.js'; +import { SynchronizationVPs } from './synchronize-vp.js'; + +export class SynchronizationAll extends SynchronizationTask { + public readonly name: string = 'all'; + + private readonly synchronizationAnalytics: SynchronizationAnalytics; + private readonly synchronizationProjects: SynchronizationProjects; + private readonly synchronizationModules: SynchronizationModules; + private readonly synchronizationRegistries: SynchronizationRegistries; + private readonly synchronizationRoles: SynchronizationRoles; + private readonly synchronizationTools: SynchronizationTools; + private readonly synchronizationTopics: SynchronizationTopics; + private readonly synchronizationSchemas: SynchronizationSchemas; + private readonly synchronizationDid: SynchronizationDid; + private readonly synchronizationVCs: SynchronizationVCs; + private readonly synchronizationVPs: SynchronizationVPs; + private readonly synchronizationPolicy: SynchronizationPolicy; + private readonly synchronizationContracts: SynchronizationContracts; + + constructor(mask: string) { + super('all', mask); + + this.synchronizationAnalytics = (new SynchronizationAnalytics(this.getMask(process.env.SYNC_ANALYTICS_MASK))); + this.synchronizationProjects = (new SynchronizationProjects(this.getMask(process.env.SYNC_ANALYTICS_MASK))); + this.synchronizationModules = (new SynchronizationModules(this.getMask(process.env.SYNC_MODULES_MASK))); + this.synchronizationRegistries = (new SynchronizationRegistries(this.getMask(process.env.SYNC_REGISTRIES_MASK))); + this.synchronizationRoles = (new SynchronizationRoles(this.getMask(process.env.SYNC_ROLES_MASK))); + this.synchronizationTools = (new SynchronizationTools(this.getMask(process.env.SYNC_TOOLS_MASK))); + this.synchronizationTopics = (new 
SynchronizationTopics(this.getMask(process.env.SYNC_TOPICS_MASK))); + this.synchronizationSchemas = (new SynchronizationSchemas(this.getMask(process.env.SYNC_SCHEMAS_MASK))); + this.synchronizationDid = (new SynchronizationDid(this.getMask(process.env.SYNC_DID_DOCUMENTS_MASK))); + this.synchronizationVCs = (new SynchronizationVCs(this.getMask(process.env.SYNC_VC_DOCUMENTS_MASK))); + this.synchronizationVPs = (new SynchronizationVPs(this.getMask(process.env.SYNC_VP_DOCUMENTS_MASK))); + this.synchronizationPolicy = (new SynchronizationPolicy(this.getMask(process.env.SYNC_POLICIES_MASK))); + this.synchronizationContracts = (new SynchronizationContracts(this.getMask(process.env.SYNC_CONTRACTS_MASK))); + } + + public override async sync(): Promise { + await this.runTask(this.synchronizationAnalytics); + await this.runTask(this.synchronizationProjects); + await this.runTask(this.synchronizationModules); + await this.runTask(this.synchronizationRegistries); + await this.runTask(this.synchronizationRoles); + await this.runTask(this.synchronizationTools); + await this.runTask(this.synchronizationTopics); + await this.runTask(this.synchronizationSchemas); + await this.runTask(this.synchronizationDid); + await this.runTask(this.synchronizationVCs); + await this.runTask(this.synchronizationVPs); + await this.runTask(this.synchronizationPolicy); + await this.runTask(this.synchronizationContracts); + } + + private async runTask(task: SynchronizationTask) { + console.log(`${task.taskName} task is started`); + try { + console.time(`----- sync ${task.taskName} -----`); + await task.sync(); + console.timeEnd(`----- sync ${task.taskName} -----`); + } catch (error) { + console.log(error); + } + console.log(`${task.taskName} task is finished`); + } + + private getMask(mask: string | undefined): string { + return (mask || '0 * * * *'); + } +} \ No newline at end of file diff --git a/indexer-service/src/helpers/synchronizers/synchronize-analytics.ts 
b/indexer-service/src/helpers/synchronizers/synchronize-analytics.ts index 8183880e5..893ec8340 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-analytics.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-analytics.ts @@ -9,7 +9,7 @@ export class SynchronizationAnalytics extends SynchronizationTask { super('analytics', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const registries = await em.count(Message, { diff --git a/indexer-service/src/helpers/synchronizers/synchronize-contracts.ts b/indexer-service/src/helpers/synchronizers/synchronize-contracts.ts index 7f19219fb..ee0cbe594 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-contracts.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-contracts.ts @@ -10,7 +10,7 @@ export class SynchronizationContracts extends SynchronizationTask { super('contracts', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-dids.ts b/indexer-service/src/helpers/synchronizers/synchronize-dids.ts index 0910205c8..52277b0a6 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-dids.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-dids.ts @@ -10,7 +10,7 @@ export class SynchronizationDid extends SynchronizationTask { super('dids', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-module.ts b/indexer-service/src/helpers/synchronizers/synchronize-module.ts index 0fc8d648c..784f1a92d 100644 --- 
a/indexer-service/src/helpers/synchronizers/synchronize-module.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-module.ts @@ -10,7 +10,7 @@ export class SynchronizationModules extends SynchronizationTask { super('modules', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-policy.ts b/indexer-service/src/helpers/synchronizers/synchronize-policy.ts index 57355e17c..bd8f3673f 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-policy.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-policy.ts @@ -18,7 +18,7 @@ export class SynchronizationPolicy extends SynchronizationTask { super('policy', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); const collection2 = em.getCollection('token_cache'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-projects.ts b/indexer-service/src/helpers/synchronizers/synchronize-projects.ts index 195cc25ac..9e8782046 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-projects.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-projects.ts @@ -22,7 +22,7 @@ export class SynchronizationProjects extends SynchronizationTask { super('projects', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const projectLocations = await em .getCollection('project_coordinates') diff --git a/indexer-service/src/helpers/synchronizers/synchronize-registry.ts b/indexer-service/src/helpers/synchronizers/synchronize-registry.ts index 94cc579a1..97d06f112 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-registry.ts +++ 
b/indexer-service/src/helpers/synchronizers/synchronize-registry.ts @@ -10,7 +10,7 @@ export class SynchronizationRegistries extends SynchronizationTask { super('registries', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-role.ts b/indexer-service/src/helpers/synchronizers/synchronize-role.ts index f579f46e5..b1f752f21 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-role.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-role.ts @@ -10,7 +10,7 @@ export class SynchronizationRoles extends SynchronizationTask { super('roles', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-schema.ts b/indexer-service/src/helpers/synchronizers/synchronize-schema.ts index 53d604532..e00c8cb85 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-schema.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-schema.ts @@ -11,7 +11,7 @@ export class SynchronizationSchemas extends SynchronizationTask { super('schemas', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-tool.ts b/indexer-service/src/helpers/synchronizers/synchronize-tool.ts index 3986e3a19..8210d0946 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-tool.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-tool.ts @@ -10,7 +10,7 @@ export class SynchronizationTools extends SynchronizationTask { super('tools', mask); } - protected 
override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-topic.ts b/indexer-service/src/helpers/synchronizers/synchronize-topic.ts index fa69c36c1..8e6ff5f8f 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-topic.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-topic.ts @@ -10,7 +10,7 @@ export class SynchronizationTopics extends SynchronizationTask { super('topics', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-vcs.ts b/indexer-service/src/helpers/synchronizers/synchronize-vcs.ts index 84b29a85d..d7b9610d3 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-vcs.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-vcs.ts @@ -11,7 +11,7 @@ export class SynchronizationVCs extends SynchronizationTask { super('vcs', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/indexer-service/src/helpers/synchronizers/synchronize-vp.ts b/indexer-service/src/helpers/synchronizers/synchronize-vp.ts index bcda55c75..4ffbfbf50 100644 --- a/indexer-service/src/helpers/synchronizers/synchronize-vp.ts +++ b/indexer-service/src/helpers/synchronizers/synchronize-vp.ts @@ -11,7 +11,7 @@ export class SynchronizationVPs extends SynchronizationTask { super('vps', mask); } - protected override async sync(): Promise { + public override async sync(): Promise { const em = DataBaseHelper.getEntityManager(); const collection = em.getCollection('message'); diff --git a/interfaces/src/models/schema.ts 
b/interfaces/src/models/schema.ts index 549f20502..c64b5fe3d 100644 --- a/interfaces/src/models/schema.ts +++ b/interfaces/src/models/schema.ts @@ -429,4 +429,24 @@ export class Schema implements ISchema { }); } } + + /** + * Get all fields + */ + public getFields(): SchemaField[] { + return this._getFields([], this.fields); + } + + /** + * Get all fields + */ + private _getFields(result: SchemaField[], fields?: SchemaField[]): SchemaField[] { + if (Array.isArray(fields)) { + for (const field of fields) { + result.push(field); + this._getFields(result, field.fields); + } + } + return result; + } }