diff --git a/packages/packages.json b/packages/packages.json index dfacc02..59f0ee6 100644 --- a/packages/packages.json +++ b/packages/packages.json @@ -1,12 +1,12 @@ { "dev": { - "contract/valory/fpmm_deterministic_factory/0.1.0": "bafybeihi2pozsqbgu6mkepoyqqwqxc3o7ks47tx7vuf5hxfkbsyuskzqjq", + "contract/valory/fpmm_deterministic_factory/0.1.0": "bafybeihcvttq34xxd35tfwzdg2qunatyaq373uwkpo5fe6nlxrzya4tuqe", "contract/valory/wxdai/0.1.0": "bafybeidalocwbhmbto6ii6adldtpcughtdt6j3v4tv36utevjk2wrdyqie", "contract/valory/fpmm/0.1.0": "bafybeiai2ruj27nnglvn7yc5atojyojo3fkmofw6wrjgz2ybps2uwdizx4", - "skill/valory/market_creation_manager_abci/0.1.0": "bafybeidmzffvwvh3euspl7rub2mnhhqj4qhurwbryxvxaxsntueu2gelve", - "skill/valory/market_maker_abci/0.1.0": "bafybeic2zhtrusglacvb6uc66w2irybg2xm4wtwuounkmkaox3icmbwdtm", - "agent/valory/market_maker/0.1.0": "bafybeidsov3f5hchknxiyeouoeupqot5o44y2as6u2q4fcqeszz7bycd2a", - "service/valory/market_maker/0.1.0": "bafybeihvw3wntknp4sfhsd3vreth7srtbrme5wqoxj73ameabg3t6mofd4" + "skill/valory/market_creation_manager_abci/0.1.0": "bafybeih2hbwpalkavcelgm2yfv4fqgldwadiyesj4rowmhzzgt3eitmshq", + "skill/valory/market_maker_abci/0.1.0": "bafybeiclluyvzt4gvjhtc2qbcfpdhqq4mb44xszsvu2wx4hmxaehdsmwbi", + "agent/valory/market_maker/0.1.0": "bafybeib2jsel72sqshzqanf5zhraz2oajnrtdavybg3j26fc4kxrn7uabu", + "service/valory/market_maker/0.1.0": "bafybeif2bmcvqmu7s4l445lbcvqbqysvhrqfkdchuae2rs5kfbsaz6qd7i" }, "third_party": { "protocol/valory/contract_api/1.0.0": "bafybeidgu7o5llh26xp3u3ebq3yluull5lupiyeu6iooi2xyymdrgnzq5i", @@ -18,27 +18,28 @@ "protocol/valory/acn/1.1.0": "bafybeidluaoeakae3exseupaea4i3yvvk5vivyt227xshjlffywwxzcxqe", "protocol/valory/tendermint/0.1.0": "bafybeig4mi3vmlv5zpbjbfuzcgida6j5f2nhrpedxicmrrfjweqc5r7cra", "protocol/valory/llm/1.0.0": "bafybeihj2orqapokafg7co3wlogwgf6cckaaj2bz3nuv4rm3opkoyhrlp4", - "contract/valory/service_registry/0.1.0": "bafybeibtaa6tvhzoo4osu3moilxtwxbfwqo3vt7rsr3ugjugzwndzkkmd4", - 
"contract/valory/gnosis_safe/0.1.0": "bafybeif3n23srpc7kouvry6c5h3qmd4h63vtd3gg7el2upjm6xac6mligi", - "contract/valory/gnosis_safe_proxy_factory/0.1.0": "bafybeic5f3vnd3pkzlhapkzgtrfmer7otjg3xxaflxkadpz6prjxk3salq", + "contract/valory/service_registry/0.1.0": "bafybeigrfupd7lo6aet376rwluqgm33jfghibkbvumfsdgrymqxoopqydq", + "contract/valory/gnosis_safe/0.1.0": "bafybeiag5jjj5c66skkbjnxcjngeufhtcvcpnbnjlgox5mtuo2tk4w3ohi", + "contract/valory/gnosis_safe_proxy_factory/0.1.0": "bafybeiafghfcxrg3apccnrvvw7tfgjbopedbfevmhepw2reyhyertxrilm", "contract/valory/multisend/0.1.0": "bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y", "contract/valory/realitio/0.1.0": "bafybeignaxfk7fjlfa4lioflwsfru2z5xln5cyvyew3if5oas2sgm5wuii", "contract/valory/realitio_proxy/0.1.0": "bafybeidx37xzjjmapwacedgzhum6grfzhp5vhouz4zu3pvpgdy5pgb2fr4", "contract/valory/conditional_tokens/0.1.0": "bafybeibnzmqmeph4cj5vfh3s622mo2o5627vjjwc6bptrhj4dk65mzgvhe", - "contract/valory/mech/0.1.0": "bafybeihahwhu7mxk3bu4ruvvohqdgfrctaaelnbfrbavfzc6qzvdfgdhau", + "contract/valory/mech/0.1.0": "bafybeicvxrwnipnerhesfdylhyielxnssdhlgyt6vhh7wmknkay7fnv2xa", + "contract/valory/erc20/0.1.0": "bafybeifaxbokuzwsbxbwda5vtreznd233eu6sa2dmw4zsyg2p6b3rz7spy", "contract/valory/delegate/0.1.0": "bafybeiaslboosyvpzccaqgie56wta3aqrdkgfx5vs5ogxu7ebiez67wp6i", - "connection/valory/ipfs/0.1.0": "bafybeieogncafs6j6akrvsj6oi63iwhuunjyssmhz736vrl2sku4jc77za", - "connection/valory/abci/0.1.0": "bafybeigks2eqrpf5zn3qbah2uzf3fcpvqstommftpxlyhs32cjdg32lzea", + "connection/valory/ipfs/0.1.0": "bafybeifqca6e232lbvwrjhd7ioh43bo3evxfkpumdvcr6re2sdwjuntgna", + "connection/valory/abci/0.1.0": "bafybeihhtx7t5fsxaoajzq5nm4hrq57smigx7gqv35bss766txaaffjmsa", "connection/valory/openai/0.1.0": "bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a", "connection/valory/p2p_libp2p_client/0.1.0": "bafybeid3xg5k2ol5adflqloy75ibgljmol6xsvzvezebsg7oudxeeolz7e", - "connection/valory/ledger/0.19.0": 
"bafybeic3ft7l7ca3qgnderm4xupsfmyoihgi27ukotnz7b5hdczla2enya", - "connection/valory/http_client/0.23.0": "bafybeih5vzo22p2umhqo52nzluaanxx7kejvvpcpdsrdymckkyvmsim6gm", - "skill/valory/abstract_abci/0.1.0": "bafybeibvz3hi25ktgdzm25kih25o5osml62zk4kzpgmzmyrem3hzo2vtnm", - "skill/valory/abstract_round_abci/0.1.0": "bafybeic2emnylfmdtidobgdsxa4tgdelreeimtglqzrmic6cumhpsbfzhe", - "skill/valory/transaction_settlement_abci/0.1.0": "bafybeifhuemoavm6hf4vypg2sl7kng3iv4ipxgpk3itbh5hp73lworpuwq", - "skill/valory/registration_abci/0.1.0": "bafybeibzduasg5pahk2d2glc36wlopg54ybwcdmjqbzozt2lmkp4reqsdu", - "skill/valory/reset_pause_abci/0.1.0": "bafybeib3geaw3o35aigmb4dhplxrjlsef7hbdwrozu4akiunlgyiegshj4", - "skill/valory/termination_abci/0.1.0": "bafybeifnfuy3svz5oz6kwglu35q6jn7ai3riqplyypskb7za5ejyvjmfza", - "skill/valory/mech_interact_abci/0.1.0": "bafybeigilca6rfqu4qw7yhz5lnhzgymj5cabtusx5hjndt7w7kulkn7cga" + "connection/valory/ledger/0.19.0": "bafybeig7woeog4srdby75hpjkmx4rhpkzncbf4h2pm5r6varsp26pf2uhu", + "connection/valory/http_client/0.23.0": "bafybeihi772xgzpqeipp3fhmvpct4y6e6tpjp4sogwqrnf3wqspgeilg4u", + "skill/valory/abstract_abci/0.1.0": "bafybeiedikuvfpdx7xhyrxcpp6ywi2d6qf6uqvlwmhgcal7qhw5duicvym", + "skill/valory/abstract_round_abci/0.1.0": "bafybeia7msuvsouwcky263k6lup5hwcj73pka4pepkgyii6sya2wfawqvy", + "skill/valory/transaction_settlement_abci/0.1.0": "bafybeihfrdgfhu7ijjorvktjplfa4aq3b5as4dtwmkgl6nhy2oz4ayidfu", + "skill/valory/registration_abci/0.1.0": "bafybeihwkqc6klqrk247esh4cumfphosx3yadullxhmrrkovzg2rward5y", + "skill/valory/reset_pause_abci/0.1.0": "bafybeibd5divbbng3klkxlkzfwmwdc7imobcymfx57lf3owbyf7we7xdem", + "skill/valory/termination_abci/0.1.0": "bafybeifw36rnniyjay4f3af6jtfxpeycm5nu4zm4ejoutsk4yh2rv24ysm", + "skill/valory/mech_interact_abci/0.1.0": "bafybeieemsqlorshzgagcevh3u5tym5e2basny6qozfeoutv5sg2a56kg4" } } \ No newline at end of file diff --git a/packages/valory/agents/market_maker/aea-config.yaml 
b/packages/valory/agents/market_maker/aea-config.yaml index 34c260a..762c288 100644 --- a/packages/valory/agents/market_maker/aea-config.yaml +++ b/packages/valory/agents/market_maker/aea-config.yaml @@ -9,19 +9,19 @@ fingerprint: __init__.py: bafybeifamf4vln3qgmifksv5icmhnkelqirbaauehuv5hpsu2twesb2vni fingerprint_ignore_patterns: [] connections: -- valory/abci:0.1.0:bafybeigks2eqrpf5zn3qbah2uzf3fcpvqstommftpxlyhs32cjdg32lzea -- valory/http_client:0.23.0:bafybeih5vzo22p2umhqo52nzluaanxx7kejvvpcpdsrdymckkyvmsim6gm -- valory/ledger:0.19.0:bafybeic3ft7l7ca3qgnderm4xupsfmyoihgi27ukotnz7b5hdczla2enya +- valory/abci:0.1.0:bafybeihhtx7t5fsxaoajzq5nm4hrq57smigx7gqv35bss766txaaffjmsa +- valory/http_client:0.23.0:bafybeihi772xgzpqeipp3fhmvpct4y6e6tpjp4sogwqrnf3wqspgeilg4u +- valory/ledger:0.19.0:bafybeig7woeog4srdby75hpjkmx4rhpkzncbf4h2pm5r6varsp26pf2uhu - valory/openai:0.1.0:bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a - valory/p2p_libp2p_client:0.1.0:bafybeid3xg5k2ol5adflqloy75ibgljmol6xsvzvezebsg7oudxeeolz7e contracts: -- valory/gnosis_safe:0.1.0:bafybeif3n23srpc7kouvry6c5h3qmd4h63vtd3gg7el2upjm6xac6mligi -- valory/gnosis_safe_proxy_factory:0.1.0:bafybeic5f3vnd3pkzlhapkzgtrfmer7otjg3xxaflxkadpz6prjxk3salq +- valory/gnosis_safe:0.1.0:bafybeiag5jjj5c66skkbjnxcjngeufhtcvcpnbnjlgox5mtuo2tk4w3ohi +- valory/gnosis_safe_proxy_factory:0.1.0:bafybeiafghfcxrg3apccnrvvw7tfgjbopedbfevmhepw2reyhyertxrilm - valory/multisend:0.1.0:bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y - valory/realitio:0.1.0:bafybeignaxfk7fjlfa4lioflwsfru2z5xln5cyvyew3if5oas2sgm5wuii - valory/conditional_tokens:0.1.0:bafybeibnzmqmeph4cj5vfh3s622mo2o5627vjjwc6bptrhj4dk65mzgvhe -- valory/fpmm_deterministic_factory:0.1.0:bafybeihi2pozsqbgu6mkepoyqqwqxc3o7ks47tx7vuf5hxfkbsyuskzqjq -- valory/service_registry:0.1.0:bafybeibtaa6tvhzoo4osu3moilxtwxbfwqo3vt7rsr3ugjugzwndzkkmd4 +- valory/fpmm_deterministic_factory:0.1.0:bafybeihcvttq34xxd35tfwzdg2qunatyaq373uwkpo5fe6nlxrzya4tuqe +- 
valory/service_registry:0.1.0:bafybeigrfupd7lo6aet376rwluqgm33jfghibkbvumfsdgrymqxoopqydq - valory/wxdai:0.1.0:bafybeidalocwbhmbto6ii6adldtpcughtdt6j3v4tv36utevjk2wrdyqie - valory/fpmm:0.1.0:bafybeiai2ruj27nnglvn7yc5atojyojo3fkmofw6wrjgz2ybps2uwdizx4 protocols: @@ -35,15 +35,15 @@ protocols: - valory/llm:1.0.0:bafybeihj2orqapokafg7co3wlogwgf6cckaaj2bz3nuv4rm3opkoyhrlp4 - valory/tendermint:0.1.0:bafybeig4mi3vmlv5zpbjbfuzcgida6j5f2nhrpedxicmrrfjweqc5r7cra skills: -- valory/abstract_abci:0.1.0:bafybeibvz3hi25ktgdzm25kih25o5osml62zk4kzpgmzmyrem3hzo2vtnm -- valory/abstract_round_abci:0.1.0:bafybeic2emnylfmdtidobgdsxa4tgdelreeimtglqzrmic6cumhpsbfzhe -- valory/market_maker_abci:0.1.0:bafybeic2zhtrusglacvb6uc66w2irybg2xm4wtwuounkmkaox3icmbwdtm -- valory/registration_abci:0.1.0:bafybeibzduasg5pahk2d2glc36wlopg54ybwcdmjqbzozt2lmkp4reqsdu -- valory/market_creation_manager_abci:0.1.0:bafybeidmzffvwvh3euspl7rub2mnhhqj4qhurwbryxvxaxsntueu2gelve -- valory/reset_pause_abci:0.1.0:bafybeib3geaw3o35aigmb4dhplxrjlsef7hbdwrozu4akiunlgyiegshj4 -- valory/termination_abci:0.1.0:bafybeifnfuy3svz5oz6kwglu35q6jn7ai3riqplyypskb7za5ejyvjmfza -- valory/transaction_settlement_abci:0.1.0:bafybeifhuemoavm6hf4vypg2sl7kng3iv4ipxgpk3itbh5hp73lworpuwq -- valory/mech_interact_abci:0.1.0:bafybeigilca6rfqu4qw7yhz5lnhzgymj5cabtusx5hjndt7w7kulkn7cga +- valory/abstract_abci:0.1.0:bafybeiedikuvfpdx7xhyrxcpp6ywi2d6qf6uqvlwmhgcal7qhw5duicvym +- valory/abstract_round_abci:0.1.0:bafybeia7msuvsouwcky263k6lup5hwcj73pka4pepkgyii6sya2wfawqvy +- valory/market_maker_abci:0.1.0:bafybeiclluyvzt4gvjhtc2qbcfpdhqq4mb44xszsvu2wx4hmxaehdsmwbi +- valory/registration_abci:0.1.0:bafybeihwkqc6klqrk247esh4cumfphosx3yadullxhmrrkovzg2rward5y +- valory/market_creation_manager_abci:0.1.0:bafybeih2hbwpalkavcelgm2yfv4fqgldwadiyesj4rowmhzzgt3eitmshq +- valory/reset_pause_abci:0.1.0:bafybeibd5divbbng3klkxlkzfwmwdc7imobcymfx57lf3owbyf7we7xdem +- valory/termination_abci:0.1.0:bafybeifw36rnniyjay4f3af6jtfxpeycm5nu4zm4ejoutsk4yh2rv24ysm +- 
valory/transaction_settlement_abci:0.1.0:bafybeihfrdgfhu7ijjorvktjplfa4aq3b5as4dtwmkgl6nhy2oz4ayidfu +- valory/mech_interact_abci:0.1.0:bafybeieemsqlorshzgagcevh3u5tym5e2basny6qozfeoutv5sg2a56kg4 default_ledger: ethereum required_ledgers: - ethereum @@ -76,11 +76,11 @@ logging_config: skill_exception_policy: stop_and_exit dependencies: open-aea-ledger-cosmos: - version: ==1.49.0 + version: ==1.51.0 open-aea-ledger-ethereum: - version: ==1.49.0 + version: ==1.51.0 open-aea-test-autonomy: - version: ==0.14.8 + version: ==0.14.11.post1 default_connection: null --- public_id: valory/abci:0.1.0 @@ -219,4 +219,5 @@ models: google_engine_id: ${str:google_engine_id} openai_api_key: ${str:openai_api_key} mech_contract_address: ${str:0x77af31de935740567cf4ff1986d04b2c964a786a} + answer_retry_intervals: ${list:[0, 86400, 259200, 604800, 1209600]} is_abstract: false diff --git a/packages/valory/contracts/fpmm_deterministic_factory/contract.yaml b/packages/valory/contracts/fpmm_deterministic_factory/contract.yaml index fbde586..3faf70c 100644 --- a/packages/valory/contracts/fpmm_deterministic_factory/contract.yaml +++ b/packages/valory/contracts/fpmm_deterministic_factory/contract.yaml @@ -16,9 +16,9 @@ contract_interface_paths: dependencies: hexbytes: {} open-aea-ledger-ethereum: - version: ==1.49.0 + version: ==1.51.0 open-aea-test-autonomy: - version: ==0.14.8 + version: ==0.14.11.post1 web3: version: <7,>=6.0.0 contracts: [] diff --git a/packages/valory/services/market_maker/service.yaml b/packages/valory/services/market_maker/service.yaml index 6baaae3..16946bf 100644 --- a/packages/valory/services/market_maker/service.yaml +++ b/packages/valory/services/market_maker/service.yaml @@ -7,7 +7,7 @@ license: Apache-2.0 fingerprint: README.md: bafybeibwz3af6326msp4h3kqehijvmyhaytvyfbo3o2npc2w4b6zrg6pfq fingerprint_ignore_patterns: [] -agent: valory/market_maker:0.1.0:bafybeidsov3f5hchknxiyeouoeupqot5o44y2as6u2q4fcqeszz7bycd2a +agent: 
valory/market_maker:0.1.0:bafybeib2jsel72sqshzqanf5zhraz2oajnrtdavybg3j26fc4kxrn7uabu number_of_agents: 1 deployment: agent: @@ -71,6 +71,8 @@ models: multisend_batch_size: ${MULTISEND_BATCH_SIZE:int:1} ipfs_address: ${IPFS_ADDRESS:str:https://gateway.autonolas.tech/ipfs/} mech_contract_address: ${MECH_CONTRACT_ADDRESS:str:0x77af31de935740567cf4ff1986d04b2c964a786a} + answer_retry_intervals: ${ANSWER_RETRY_INTERVALS:list:[0, 86400, 259200, 604800, + 1209600]} on_chain_service_id: ${ON_CHAIN_SERVICE_ID:int:null} reset_tendermint_after: ${RESET_TENDERMINT_AFTER:int:1} reset_pause_duration: ${RESET_PAUSE_DURATION:int:1800} diff --git a/packages/valory/skills/market_creation_manager_abci/behaviours.py b/packages/valory/skills/market_creation_manager_abci/behaviours.py index 93629ce..8c4ddc4 100644 --- a/packages/valory/skills/market_creation_manager_abci/behaviours.py +++ b/packages/valory/skills/market_creation_manager_abci/behaviours.py @@ -2291,6 +2291,52 @@ def _get_balance(self, account: str) -> Generator[None, None, Optional[int]]: balance = cast(int, ledger_api_response.state.body.get("get_balance_result")) return balance + def _eligible_questions_to_answer( + self, unanswered_questions: List[Dict[str, Any]] + ) -> List[str]: + """Determine the eligible questions to answer at this time""" + now = self.last_synced_timestamp + eligible_questions_id = [] + answer_retry_intervals = self.params.answer_retry_intervals + + self.context.logger.info(f"Answer retry intervals: {answer_retry_intervals}") + + for question in unanswered_questions: + question_id = question["question"]["id"].lower() + + if question_id in self.shared_state.questions_responded: + continue + + if question_id not in self.shared_state.questions_requested_mech: + self.shared_state.questions_requested_mech[question_id] = { + "question": question, + "retries": [], + } + + retries = self.shared_state.questions_requested_mech[question_id]["retries"] + n_retries = len(retries) + time_since_last_retry = 
now - retries[-1] if retries else 0 + retry_period = answer_retry_intervals[ + min(n_retries, len(answer_retry_intervals) - 1) + ] + if n_retries == 0 or time_since_last_retry > retry_period: + eligible_questions_id.append(question_id) + + self.context.logger.info( + f"Determined {len(eligible_questions_id)} eligible questions to answer." + ) + + num_questions = min( + len(eligible_questions_id), self.params.multisend_batch_size + ) + random.seed(self.last_synced_timestamp) + random_questions_id = random.sample(eligible_questions_id, num_questions) + + self.context.logger.info( + f"Chosen {len(random_questions_id)} eligible questions to answer." + ) + return random_questions_id + def get_payload(self) -> Generator[None, None, str]: """Get the transaction payload""" # get the questions to that need to be answered @@ -2300,23 +2346,17 @@ def get_payload(self) -> Generator[None, None, str]: self.context.logger.info("Couldn't get the questions") return GetPendingQuestionsRound.ERROR_PAYLOAD - filtered_questions = [ - question - for question in unanswered_questions - if question["question"]["id"].lower() - not in self.shared_state.questions_requested_mech - ] - random.seed(self.last_synced_timestamp) - num_questions = min(len(filtered_questions), self.params.multisend_batch_size) - random_questions = random.sample(filtered_questions, num_questions) - questions = random_questions + eligible_questions_id = self._eligible_questions_to_answer(unanswered_questions) + + self.context.logger.info(f"{self.shared_state.questions_requested_mech=}") - if len(questions) == 0: - self.context.logger.info("No questions to close") + if len(eligible_questions_id) == 0: + self.context.logger.info("No eligible questions to answer") return GetPendingQuestionsRound.NO_TX_PAYLOAD self.context.logger.info( - f"Got {len(questions)} questions to close. " f"Questions: {questions}" + f"Got {len(eligible_questions_id)} questions to close. 
" + f"Questions ID: {eligible_questions_id}" ) safe_address = self.synchronized_data.safe_contract_address @@ -2327,7 +2367,7 @@ def get_payload(self) -> Generator[None, None, str]: self.context.logger.info(f"Address {safe_address!r} has balance {balance}.") max_num_questions = min( - len(questions), self.params.questions_to_close_batch_size + len(eligible_questions_id), self.params.questions_to_close_batch_size ) bond_required = self.params.close_question_bond * max_num_questions @@ -2342,14 +2382,17 @@ def get_payload(self) -> Generator[None, None, str]: # Prepare the Mech Requests for these questions new_mech_requests = [] - for question in questions: - question_id = question["question"]["id"].lower() - if question_id in self.shared_state.questions_requested_mech: - # we already processed this question, skip it - self.context.logger.info( - f"Question {question_id} already processed, skipping it." - ) - continue + for question_id in eligible_questions_id: + question = self.shared_state.questions_requested_mech[question_id][ + "question" + ] + retries = self.shared_state.questions_requested_mech[question_id]["retries"] + retries.append(self.last_synced_timestamp) + + self.context.logger.info( + f"Requesting mech answer for question {question_id} ({question['title']})." 
+ ) + self.context.logger.info(f"Question {question_id} retries: {retries}.") new_mech_requests.append( asdict( @@ -2360,7 +2403,6 @@ def get_payload(self) -> Generator[None, None, str]: ) ) ) - self.shared_state.questions_requested_mech[question_id] = question self.context.logger.info(f"new_mech_requests: {new_mech_requests}") @@ -2432,9 +2474,20 @@ def _get_payload(self) -> Generator[None, None, str]: if question_id not in self.shared_state.questions_requested_mech: continue - question = self.shared_state.questions_requested_mech[question_id] - + question = self.shared_state.questions_requested_mech[question_id][ + "question" + ] + retries = self.shared_state.questions_requested_mech[question_id]["retries"] answer = self._parse_mech_response(response) + + if answer is None and len(retries) >= len( + self.params.answer_retry_intervals + ): + self.context.logger.info( + f"Question {question} has been retried at timestamps {retries} without success. Assuming question is invalid." + ) + answer = ANSWER_INVALID + self.context.logger.info(f"Got answer {answer} for question {question}") if answer is None: @@ -2469,6 +2522,7 @@ def _get_payload(self) -> Generator[None, None, str]: # try to answer the same question multiple times due to a out-of-sync issue # between the subgraph and the realitio contract. 
self.shared_state.questions_responded.add(question_id) + del self.shared_state.questions_requested_mech[question_id] if len(txs) == 0: # something went wrong, respond with ERROR payload for now diff --git a/packages/valory/skills/market_creation_manager_abci/models.py b/packages/valory/skills/market_creation_manager_abci/models.py index bafb849..7922511 100644 --- a/packages/valory/skills/market_creation_manager_abci/models.py +++ b/packages/valory/skills/market_creation_manager_abci/models.py @@ -163,6 +163,9 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self.mech_interact_round_timeout_seconds = self._ensure( "mech_interact_round_timeout_seconds", kwargs, type_=int ) + self.answer_retry_intervals = self._ensure( + key="answer_retry_intervals", kwargs=kwargs, type_=List[int] + ) super().__init__(*args, **kwargs) diff --git a/packages/valory/skills/market_creation_manager_abci/skill.yaml b/packages/valory/skills/market_creation_manager_abci/skill.yaml index 4d7f697..be0e484 100644 --- a/packages/valory/skills/market_creation_manager_abci/skill.yaml +++ b/packages/valory/skills/market_creation_manager_abci/skill.yaml @@ -10,11 +10,11 @@ aea_version: '>=1.0.0, <2.0.0' fingerprint: .gitignore: bafybeihdfdezgtr3s2lzq5y3oaitfrdy4u4pehionju2bdez35wcjgqx6y __init__.py: bafybeibkyjt4covc4yhd22aw7kav45zozk3exdv344emt3lilln64soaxm - behaviours.py: bafybeigb3lkuhg66m7kucao3j6eknoz6pbjhqwvyaos44hqcjus7nuzpcm + behaviours.py: bafybeigqohcwcmboyit2ihfo3b5bxoedltvrgr2u6uotb7aomr4d4aotle dialogues.py: bafybeicmaufkl7vdomnfciv7lw4536ssld7x4uemdapuhsyvfpd4ncibza fsm_specification.yaml: bafybeicps5t2anm7fv35fwnw7oolaxxmpmjio6mdw4sc6rzpnsolph5xlm handlers.py: bafybeietxjfli2i57kb7heoy772rcq2znusl36gg7jjj5g3pddw7egny3q - models.py: bafybeibkjv4cgvje2ozuzbmcvhscvnvd5isenlpuwmgjr4oyscmgjdobre + models.py: bafybeibef4czds6n7h6bgdcpfmfrhakbtegm4iffauz24pmsu7xaaw6abe payloads.py: bafybeibu7sptf43adazxpyzwtfpph7bgfhtwiotg5sdlcwjt6iw3idqn7a rounds.py: 
bafybeie6rgwfulbu5xkuvxgrujbvenioyfggtzlmt23pbm6auzj3w4se44 tests/__init__.py: bafybeihfxvqnyfly72tbxnnnglshcilm2kanihqnjiasvcz3ec3csw32ti @@ -22,11 +22,11 @@ fingerprint_ignore_patterns: [] connections: - valory/openai:0.1.0:bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a contracts: -- valory/gnosis_safe:0.1.0:bafybeif3n23srpc7kouvry6c5h3qmd4h63vtd3gg7el2upjm6xac6mligi +- valory/gnosis_safe:0.1.0:bafybeiag5jjj5c66skkbjnxcjngeufhtcvcpnbnjlgox5mtuo2tk4w3ohi - valory/multisend:0.1.0:bafybeig5byt5urg2d2bsecufxe5ql7f4mezg3mekfleeh32nmuusx66p4y - valory/realitio:0.1.0:bafybeignaxfk7fjlfa4lioflwsfru2z5xln5cyvyew3if5oas2sgm5wuii - valory/conditional_tokens:0.1.0:bafybeibnzmqmeph4cj5vfh3s622mo2o5627vjjwc6bptrhj4dk65mzgvhe -- valory/fpmm_deterministic_factory:0.1.0:bafybeihi2pozsqbgu6mkepoyqqwqxc3o7ks47tx7vuf5hxfkbsyuskzqjq +- valory/fpmm_deterministic_factory:0.1.0:bafybeihcvttq34xxd35tfwzdg2qunatyaq373uwkpo5fe6nlxrzya4tuqe - valory/wxdai:0.1.0:bafybeidalocwbhmbto6ii6adldtpcughtdt6j3v4tv36utevjk2wrdyqie - valory/fpmm:0.1.0:bafybeiai2ruj27nnglvn7yc5atojyojo3fkmofw6wrjgz2ybps2uwdizx4 protocols: @@ -34,9 +34,9 @@ protocols: - valory/contract_api:1.0.0:bafybeidgu7o5llh26xp3u3ebq3yluull5lupiyeu6iooi2xyymdrgnzq5i - valory/ledger_api:1.0.0:bafybeihdk6psr4guxmbcrc26jr2cbgzpd5aljkqvpwo64bvaz7tdti2oni skills: -- valory/abstract_round_abci:0.1.0:bafybeic2emnylfmdtidobgdsxa4tgdelreeimtglqzrmic6cumhpsbfzhe -- valory/transaction_settlement_abci:0.1.0:bafybeifhuemoavm6hf4vypg2sl7kng3iv4ipxgpk3itbh5hp73lworpuwq -- valory/mech_interact_abci:0.1.0:bafybeigilca6rfqu4qw7yhz5lnhzgymj5cabtusx5hjndt7w7kulkn7cga +- valory/abstract_round_abci:0.1.0:bafybeia7msuvsouwcky263k6lup5hwcj73pka4pepkgyii6sya2wfawqvy +- valory/transaction_settlement_abci:0.1.0:bafybeihfrdgfhu7ijjorvktjplfa4aq3b5as4dtwmkgl6nhy2oz4ayidfu +- valory/mech_interact_abci:0.1.0:bafybeieemsqlorshzgagcevh3u5tym5e2basny6qozfeoutv5sg2a56kg4 behaviours: main: args: {} @@ -218,6 +218,12 @@ models: light_slash_unit_amount: 
5000000000000000 serious_slash_unit_amount: 8000000000000000 mech_contract_address: '0x77af31de935740567cf4ff1986d04b2c964a786a' + answer_retry_intervals: + - 0 + - 86400 + - 259200 + - 604800 + - 1209600 class_name: MarketCreationManagerParams randomness_api: args: diff --git a/packages/valory/skills/market_maker_abci/skill.yaml b/packages/valory/skills/market_maker_abci/skill.yaml index 5efc610..b67ac41 100644 --- a/packages/valory/skills/market_maker_abci/skill.yaml +++ b/packages/valory/skills/market_maker_abci/skill.yaml @@ -21,13 +21,13 @@ connections: [] contracts: [] protocols: [] skills: -- valory/abstract_round_abci:0.1.0:bafybeic2emnylfmdtidobgdsxa4tgdelreeimtglqzrmic6cumhpsbfzhe -- valory/registration_abci:0.1.0:bafybeibzduasg5pahk2d2glc36wlopg54ybwcdmjqbzozt2lmkp4reqsdu -- valory/reset_pause_abci:0.1.0:bafybeib3geaw3o35aigmb4dhplxrjlsef7hbdwrozu4akiunlgyiegshj4 -- valory/market_creation_manager_abci:0.1.0:bafybeidmzffvwvh3euspl7rub2mnhhqj4qhurwbryxvxaxsntueu2gelve -- valory/termination_abci:0.1.0:bafybeifnfuy3svz5oz6kwglu35q6jn7ai3riqplyypskb7za5ejyvjmfza -- valory/transaction_settlement_abci:0.1.0:bafybeifhuemoavm6hf4vypg2sl7kng3iv4ipxgpk3itbh5hp73lworpuwq -- valory/mech_interact_abci:0.1.0:bafybeigilca6rfqu4qw7yhz5lnhzgymj5cabtusx5hjndt7w7kulkn7cga +- valory/abstract_round_abci:0.1.0:bafybeia7msuvsouwcky263k6lup5hwcj73pka4pepkgyii6sya2wfawqvy +- valory/registration_abci:0.1.0:bafybeihwkqc6klqrk247esh4cumfphosx3yadullxhmrrkovzg2rward5y +- valory/reset_pause_abci:0.1.0:bafybeibd5divbbng3klkxlkzfwmwdc7imobcymfx57lf3owbyf7we7xdem +- valory/market_creation_manager_abci:0.1.0:bafybeih2hbwpalkavcelgm2yfv4fqgldwadiyesj4rowmhzzgt3eitmshq +- valory/termination_abci:0.1.0:bafybeifw36rnniyjay4f3af6jtfxpeycm5nu4zm4ejoutsk4yh2rv24ysm +- valory/transaction_settlement_abci:0.1.0:bafybeihfrdgfhu7ijjorvktjplfa4aq3b5as4dtwmkgl6nhy2oz4ayidfu +- valory/mech_interact_abci:0.1.0:bafybeieemsqlorshzgagcevh3u5tym5e2basny6qozfeoutv5sg2a56kg4 behaviours: main: args: {} @@ 
-214,6 +214,12 @@ models: light_slash_unit_amount: 5000000000000000 serious_slash_unit_amount: 8000000000000000 mech_contract_address: '0x77af31de935740567cf4ff1986d04b2c964a786a' + answer_retry_intervals: + - 0 + - 86400 + - 259200 + - 604800 + - 1209600 class_name: Params randomness_api: args: @@ -264,5 +270,5 @@ models: class_name: TendermintDialogues dependencies: open-aea-test-autonomy: - version: ==0.14.8 + version: ==0.14.11.post1 is_abstract: false diff --git a/scripts/list_finalizing_markets.ipynb b/scripts/list_finalizing_markets.ipynb index ba50287..237e775 100644 --- a/scripts/list_finalizing_markets.ipynb +++ b/scripts/list_finalizing_markets.ipynb @@ -26,18 +26,241 @@ "#\n", "# ------------------------------------------------------------------------------\n", "\n", - "\"\"\"Script for listing markets.\"\"\"\n", + "\"\"\"Script for analyzing market creator markets.\"\"\"\n", "\n", "from collections import defaultdict\n", "from datetime import datetime, timedelta\n", "from enum import Enum\n", + "from mech_request_utils import get_mech_requests, IPFS_ADDRESS\n", + "from scipy import stats\n", "from string import Template\n", - "from typing import Any, Dict\n", + "from typing import Any, Dict, Optional\n", + "from IPython.display import display, HTML\n", "\n", + "import seaborn as sns\n", + "import dateutil.parser\n", + "import json\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import pandas as pd\n", + "import pytz\n", "import requests\n", "\n", + "MARKET_CREATOR_ADDRESS = \"0x89c5cc945dd550BcFfb72Fe42BfF002429F46Fec\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Process mech requests\n", + "Process mech requests from the Market Creator" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mech_requests = get_mech_requests(MARKET_CREATOR_ADDRESS)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + 
"outputs": [], + "source": [ + "class MechDeliverState(Enum):\n", + " \"\"\"Market state\"\"\"\n", + "\n", + " INVALID_RESPONSE = 1\n", + " TIMEOUT = 2\n", + " RESP_NOT_DETERMINABLE = 3\n", + " RESP_INVALID_MARKET = 4\n", + " RESP_YES = 5\n", + " RESP_NO = 6\n", + " UNKNOWN = 7\n", + " NA = 8\n", "\n", + " def __str__(self) -> str:\n", + " \"\"\"Prints the market status.\"\"\"\n", + " return self.name.lower()\n", + "\n", + "# HISTOGRAM OF BLOCK DELAYS\n", + "deliver_delay_blocks = []\n", + "for request_data in mech_requests.values():\n", + " if 'deliver' in request_data:\n", + " request_block = int(request_data['blockNumber'])\n", + " deliver_block = int(request_data['deliver']['blockNumber'])\n", + " delay = deliver_block - request_block\n", + " deliver_delay_blocks.append(delay)\n", + "\n", + "deliver_delay_blocks_stats = stats.describe(deliver_delay_blocks)\n", + "print(deliver_delay_blocks_stats)\n", + "\n", + "# Filter out outliers\n", + "mean = deliver_delay_blocks_stats[2]\n", + "std = np.sqrt(deliver_delay_blocks_stats[3])\n", + "alpha = 5\n", + "deliver_delay_blocks = [td for td in deliver_delay_blocks if np.abs(td - mean) <= alpha * std]\n", + "#deliver_delay_blocks = [td for td in deliver_delay_blocks]\n", + "\n", + "\n", + "# Create histogram\n", + "plt.hist(deliver_delay_blocks, bins=100, color='blue', edgecolor='black')\n", + "plt.xlabel('Deliver delay (blocks)')\n", + "plt.ylabel('Frequency')\n", + "plt.title('Block delays between Delivers and Requests')\n", + "plt.grid(True)\n", + "plt.show()\n", + "\n", + "\n", + "# HISTOGRAM OF DELIVERS PER RESPONSE\n", + "def _count_delivers_type_per_response(mech_requests: Dict[str, Any], from_timestamp: int = 0, to_timestamp: int = 2147483647) -> None:\n", + " result_counts = {}\n", + "\n", + " for request_data in mech_requests.values():\n", + " block_timestamp = int(request_data.get('blockTimestamp', 0))\n", + " if from_timestamp <= block_timestamp <= to_timestamp:\n", + " if 'deliver' in request_data:\n", + " 
def plot_delivers_evolution_over_time(mech_requests: Dict[str, Any], from_timestamp: int, weeks: int = 1) -> None:
    """Plot a cumulative stacked-area chart of deliver result types per week.

    Counts deliver result types week by week starting at ``from_timestamp``
    (via ``_count_delivers_type_per_response``) and stacks them so the top
    edge of the chart is the total number of delivers in each week.
    """
    week_seconds = 7 * 24 * 3600
    weekly_counts: List[Dict[str, int]] = []
    for week_index in range(weeks):
        week_start = from_timestamp + week_index * week_seconds
        week_end = week_start + week_seconds
        weekly_counts.append(
            _count_delivers_type_per_response(mech_requests, week_start, week_end)
        )

    result_types = set().union(*(counts.keys() for counts in weekly_counts))
    counts_per_type = {
        result_type: [counts.get(result_type, 0) for counts in weekly_counts]
        for result_type in result_types
    }

    plt.figure(figsize=(10, 6))
    weeks_range = range(1, weeks + 1)
    baseline = [0] * weeks
    for result_type, counts in counts_per_type.items():
        # Stack each result type on top of the previous ones.
        top = [base + count for base, count in zip(baseline, counts)]
        plt.fill_between(weeks_range, baseline, top, label=result_type)
        baseline = top

    plt.title('Cumulative Deliver Evolution Over Time')
    plt.xlabel('Weeks')
    plt.ylabel('Cumulative Deliver Count')
    plt.legend(loc="upper left")
    plt.grid(True)
    plt.show()


def plot_latest_mech_requests_count(mech_requests: Dict[str, Any]) -> None:
    """Scatter-plot how many hours ago each recent mech request happened.

    Bug fix: the original filtered on a 4-hour window (``4*60*60``) while
    both the chart title and the variable name said "last 24 hours"; the
    window is now 24 hours. Unused locals (a reverse-sorted timestamp list
    and an intermediate filtered list) were removed.
    """
    timestamps = [int(r["blockTimestamp"]) for r in mech_requests.values()]
    now = datetime.now().timestamp()
    window_seconds = 24 * 60 * 60  # was 4*60*60, contradicting the title

    # Hours elapsed since each request inside the window.
    x_coords = [(now - ts) / 3600 for ts in timestamps if now - ts <= window_seconds]

    plt.figure(figsize=(12, 4))
    plt.scatter(x_coords, [0] * len(x_coords), marker='o')
    plt.xlabel('Time Difference from Current Time (hours)')
    plt.title('Latest Mech Requests Distribution in Last 24 Hours')
    plt.ylim(-0.5, 0.5)  # center the points vertically
    plt.grid(True)
    plt.tight_layout()
    plt.show()


def display_latest_requests(mech_requests: Dict[str, Any], N: int = 20) -> None:
    """Render an HTML table with the ``N`` most recent mech requests.

    Requests without a deliver yet get ``None`` in the result columns.
    """
    sorted_requests = sorted(
        mech_requests.values(), key=lambda x: int(x['blockTimestamp']), reverse=True
    )[:N]

    data: Dict[str, list] = {
        "Request ID": [],
        "Prompt": [],
        "Block Timestamp (UTC)": [],
        "Result": [],
        "Deliver Block Timestamp (UTC)": []
    }

    for request in sorted_requests:
        data["Request ID"].append(request['requestId'])
        data["Prompt"].append(request['ipfsContents']['prompt'])
        data["Block Timestamp (UTC)"].append(
            datetime.utcfromtimestamp(int(request['blockTimestamp']))
        )
        if 'deliver' in request:
            deliver = request['deliver']
            data["Result"].append(deliver['ipfsContents']['result'])
            data["Deliver Block Timestamp (UTC)"].append(
                datetime.utcfromtimestamp(int(deliver['blockTimestamp']))
            )
        else:
            data["Result"].append(None)
            data["Deliver Block Timestamp (UTC)"].append(None)

    df = pd.DataFrame(data)
    display(HTML(df.to_html()))


display_latest_requests(mech_requests)
plot_latest_mech_requests_count(mech_requests)

from_date = "2024-04-01T00:00:00Z"
to_date = "2024-04-25T23:59:59Z"

from_timestamp = int(dateutil.parser.isoparse(from_date).replace(tzinfo=pytz.utc).timestamp())
to_timestamp = int(dateutil.parser.isoparse(to_date).replace(tzinfo=pytz.utc).timestamp())

result_counts = _count_delivers_type_per_response(mech_requests, from_timestamp, to_timestamp)

# Pie chart of deliver result types over the selected period.
total_count = sum(result_counts.values())
result_types = list(result_counts.keys())
counts = list(result_counts.values())

plt.figure(figsize=(10, 6))
plt.title(f'Mech delivers results from {from_date} to {to_date}')
plt.axis('equal')
plt.pie(
    counts,
    labels=None,
    autopct=lambda pct: f'{pct:.1f}% ({int(np.round(pct / 100. * total_count))})',
    startangle=140,
)
plt.legend(result_types, loc="center left", bbox_to_anchor=(1, 0.5))
plt.tight_layout()
plt.show()


from_date = "2024-02-19T00:00:00Z"
from_timestamp = int(dateutil.parser.isoparse(from_date).replace(tzinfo=pytz.utc).timestamp())

plot_delivers_evolution_over_time(mech_requests, from_timestamp, weeks=12)  # Plot evolution for 12 weeks
* total_count))})', startangle=140)\n", + "plt.legend(result_types, loc=\"center left\", bbox_to_anchor=(1, 0.5))\n", + "plt.tight_layout()\n", + "plt.show()\n", + "\n", + "\n", + "from_date = \"2024-02-19T00:00:00Z\"\n", + "from_timestamp = int(dateutil.parser.isoparse(from_date).replace(tzinfo=pytz.utc).timestamp())\n", + "\n", + "plot_delivers_evolution_over_time(mech_requests, from_timestamp, weeks=12) # Plot evolution for 4 weeks\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Process Omen markets\n", + "Process Omen markets by reading from the Omen xDAI Subgraph" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "class MarketState(Enum):\n", " \"\"\"Market state\"\"\"\n", "\n", @@ -166,7 +389,7 @@ " return answer_mapping.get(currentAnswer.lower(), \"--\")\n", "\n", "\n", - "def _get_num_answers(market) -> int:\n", + "def _get_num_answers(market: Dict[str, Any]) -> int:\n", " question = market.get(\"question\", {})\n", " if not question:\n", " question = {}\n", @@ -201,11 +424,105 @@ " output = {\"data\": {\"fixedProductMarketMakers\": all_markets}}\n", " return output\n", "\n", - "def _make_clickable(text: str, url: str) -> str:\n", + "def _make_clickable(text: str, url: str, tooltip: Optional[str]=None) -> str:\n", + " if tooltip:\n", + " return f'{text}'\n", " return f'{text}'\n", - "\n", - "def _generate_markets_df() -> pd.DataFrame:\n", - " data = _execute_fpmm_query()\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def _populate_with_mech_requests(data: Dict[str, Any], mech_requests: Dict[str, Any]) -> None:\n", + " \"\"\"Populates the markets by appending an array of the associated mech requests\"\"\"\n", + " markets = data[\"data\"][\"fixedProductMarketMakers\"]\n", + " markets_dict = {market[\"question\"].get(\"title\", \"\"): market for market in markets if 
market.get(\"question\")}\n", + "\n", + " for mech_request in mech_requests.values():\n", + " if \"ipfsContents\" in mech_request:\n", + " q = mech_request[\"ipfsContents\"][\"prompt\"]\n", + " if q in markets_dict:\n", + " markets_dict[q].setdefault(\"mechRequests\", []).append(mech_request)\n", + " else:\n", + " print(\"Market not found for mech request: {mech_request}\")\n", + " \n", + " for market in markets:\n", + " market.setdefault(\"mechRequests\", []).sort(key=lambda x: x.get(\"blockNumber\", 0), reverse=True)\n", + "\n", + "def _get_last_mech_deliver_link(market: Dict[str, Any], latest: Optional[int]=None) -> str:\n", + " if market[\"mechRequests\"]:\n", + " output = \"\"\n", + " n = len(market[\"mechRequests\"])\n", + " if not latest:\n", + " latest = n\n", + " for i in range(min(latest, n)):\n", + " request = market[\"mechRequests\"][i]\n", + " if \"deliver\" in request:\n", + " deliver = request[\"deliver\"]\n", + " ipfs_hash = deliver.get(\"ipfsHash\", \"\")\n", + " request_id = deliver.get(\"requestId\", \"\")\n", + " url = f\"{IPFS_ADDRESS}{ipfs_hash}/{request_id}\"\n", + " output += _make_clickable(f\"[{n-i}]\", url, _get_date(deliver[\"blockTimestamp\"]))\n", + " else:\n", + " output += \"[X]\"\n", + " output += \" \"\n", + " \n", + " return output\n", + " \n", + " return \"-\"\n", + "\n", + "def _get_num_mech_requests(market: Dict[str, Any]) -> int:\n", + " if market[\"mechRequests\"]:\n", + " return len(market[\"mechRequests\"])\n", + " return 0\n", + "\n", + "\n", + "def _get_last_mech_deliver_state(market: Dict[str, Any]) -> str:\n", + " if not market[\"mechRequests\"]:\n", + " return MechDeliverState.NA\n", + "\n", + " request = market[\"mechRequests\"][0]\n", + " if \"deliver\" not in request:\n", + " return MechDeliverState.UNKNOWN\n", + "\n", + " result = request[\"deliver\"][\"ipfsContents\"][\"result\"]\n", + " \n", + " if result == \"Invalid response\":\n", + " return MechDeliverState.INVALID_RESPONSE\n", + " if result.startswith(\"Task 
timed out\"):\n", + " return MechDeliverState.TIMEOUT\n", + "\n", + " result = result.replace(\" \", \"\")\n", + " if result == \"{\\\"is_determinable\\\":false}\":\n", + " return MechDeliverState.RESP_NOT_DETERMINABLE\n", + " if result == \"{\\\"has_occurred\\\":false}\":\n", + " return MechDeliverState.RESP_NO\n", + " if result == \"{\\\"has_occurred\\\":true}\":\n", + " return MechDeliverState.RESP_YES\n", + " if result.startswith(\"{\\\"is_valid\\\":false\"):\n", + " return MechDeliverState.RESP_INVALID_MARKET\n", + "\n", + " return MechDeliverState.UNKNOWN\n", + "\n", + "\n", + "def _get_mech_deliver_delay(market: Dict[str, Any]) -> int:\n", + " if not market[\"mechRequests\"]:\n", + " return -1\n", + "\n", + " request = market[\"mechRequests\"][0]\n", + " if \"deliver\" not in request:\n", + " return -1\n", + "\n", + " deliver = request[\"deliver\"]\n", + "\n", + " return int(deliver[\"blockTimestamp\"]) - int(request[\"blockTimestamp\"])\n", + "\n", + "def _generate_markets_df(data, mech_requests) -> pd.DataFrame:\n", + " _populate_with_mech_requests(data, mech_requests)\n", " rows = []\n", " for entry in data[\"data\"][\"fixedProductMarketMakers\"]:\n", " rows.append(\n", @@ -220,10 +537,23 @@ " \"Opening (UTC)\": _get_date(entry[\"openingTimestamp\"]),\n", " \"Answer finalized (UTC)\": _get_date(entry[\"answerFinalizedTimestamp\"]),\n", " \"Resolution (UTC)\": _get_date(entry[\"resolutionTimestamp\"]),\n", + " \"Num. 
def plot_opened_markets(data: Dict[str, Any]) -> None:
    """Bar-chart the number of markets created per day over the last 30 days.

    Day 0 is today (UTC midnight); negative x values are days in the past.
    """
    markets = data["data"]["fixedProductMarketMakers"]
    today_utc = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)

    day_counts: Dict[int, int] = {}
    for market in markets:
        if market['creationTimestamp'] is None:
            continue
        created = datetime.utcfromtimestamp(int(market['creationTimestamp']))
        days_diff = (created - today_utc).days
        if -days_diff <= 30:  # keep only the last 30 days
            day_counts[days_diff] = day_counts.get(days_diff, 0) + 1

    sorted_counts = dict(sorted(day_counts.items()))

    plt.figure(figsize=(10, 6))
    plt.bar(list(sorted_counts.keys()), list(sorted_counts.values()), width=0.8, color='skyblue')
    plt.title('Markets Opened per Day (Last 30 Days)')
    plt.xlabel('Days Ago (0 is today)')
    plt.ylabel('Number of Markets')
    plt.xticks(rotation=0, ha='right')
    plt.tight_layout()
    plt.show()


def plot_market_state_count(df: pd.DataFrame) -> None:
    """Pie-chart the distribution of market states in *df*."""
    state_counts = df["State"].value_counts()

    plt.figure(figsize=(8, 8))
    plt.pie(state_counts, labels=state_counts.index, autopct='%1.1f%%', startangle=140)
    plt.title('Market State Counts')
    plt.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
    # Display legend next to the pie chart.
    plt.legend(loc='center left', bbox_to_anchor=(1, 0.5), title="States", fancybox=True)
    plt.show()


def format_seconds(seconds: int) -> str:
    """Format a deliver delay in seconds for display; "--" for unknown (< 0).

    NOTE(review): an "XXh YYm" rendering was sketched and commented out in
    the original; the plain-seconds string is kept to preserve the displayed
    output, and the dead hour/minute computation was removed.
    """
    if seconds < 0:
        return "--"
    return str(seconds)


plot_market_state_count(df)
plot_opened_markets(data)


print("\nMech requests per state:")
print(df.groupby('State')['Num. mech requests'].sum())


# Contingency between market state and latest deliver state, restricted to
# markets that have at least one mech request.
filtered_df = df[df['Mech deliver state'] != MechDeliverState.NA]
contingency_table = pd.crosstab(filtered_df['State'], filtered_df['Mech deliver state'])

plt.figure(figsize=(10, 6))
sns.heatmap(contingency_table, annot=True, cmap="YlGnBu", fmt="d")
plt.title('Contingency Table of Market States vs. Latest Mech Deliver State')
plt.xlabel('Mech Answer State')
plt.ylabel('Market State')
plt.show()


# Select markets to display
market_states = [
    # MarketState.OPEN,
    MarketState.PENDING,
    MarketState.FINALIZING,
    # MarketState.ARBITRATING,
    # MarketState.CLOSED,
    # MarketState.UNKNOWN
]

print(f"Filtering {market_states} markets")
df_filtered = df[df["State"].isin(market_states)]
pd.options.display.max_colwidth = 150
html = df_filtered.to_html(
    escape=False,
    classes='sortable',
    formatters={'Deliver delay': format_seconds}
)

display(HTML(html))
#
# ------------------------------------------------------------------------------

"""Script for retrieving mech requests and their delivers."""

import json
import time
from collections import defaultdict
from typing import Any, Dict, List

import requests
from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport
from tqdm import tqdm


TEXT_ALIGNMENT = 30
MINIMUM_WRITE_FILE_DELAY_SECONDS = 20
MECH_FROM_BLOCK_RANGE = 50000
# Added: requests.get without a timeout can block forever on a stuck gateway.
HTTP_TIMEOUT_SECONDS = 60
MECH_REQUESTS_JSON_PATH = "mech_requests.json"
IPFS_ADDRESS = "https://gateway.autonolas.tech/ipfs/"
THEGRAPH_ENDPOINT = "https://api.studio.thegraph.com/query/57238/mech/0.0.2"

REQUESTS_QUERY = """
query requests_query($sender: Bytes, $id_gt: Bytes) {
  requests(where: {sender: $sender, id_gt: $id_gt}, orderBy: id, first: 1000) {
    blockNumber
    blockTimestamp
    id
    ipfsHash
    requestId
    sender
    transactionHash
  }
}
"""

DELIVERS_QUERY = """
query delivers_query($requestId: BigInt, $blockNumber_gte: BigInt, $blockNumber_lte: BigInt) {
  delivers(where: {requestId: $requestId, blockNumber_gte: $blockNumber_gte, blockNumber_lte: $blockNumber_lte}, orderBy: blockNumber, first: 1000) {
    blockNumber
    blockTimestamp
    id
    ipfsHash
    requestId
    sender
    transactionHash
  }
}
"""


def _populate_missing_requests(sender: str, mech_requests: Dict[str, Any]) -> None:
    """Fetch all requests sent by *sender* from the subgraph into *mech_requests*.

    Pages through the subgraph 1000 items at a time using ``id_gt`` as a
    cursor; request ids already present are left untouched.
    """
    print(f"{'Fetching requests...':>{TEXT_ALIGNMENT}}")

    transport = RequestsHTTPTransport(url=THEGRAPH_ENDPOINT)
    client = Client(transport=transport, fetch_schema_from_transport=True)

    id_gt = "0x00"
    while True:
        variables = {
            "sender": sender,
            "id_gt": id_gt,
        }
        response = client.execute(gql(REQUESTS_QUERY), variable_values=variables)
        items = response.get("requests", [])

        if not items:
            break

        for mech_request in items:
            if mech_request["id"] not in mech_requests:
                mech_requests[mech_request["id"]] = mech_request

        id_gt = items[-1]["id"]
        _write_mech_events_to_file(mech_requests)  # throttled checkpoint

    _write_mech_events_to_file(mech_requests, True)


def _populate_missing_responses(mech_requests: Dict[str, Any]) -> None:
    """Attach the matching deliver to every request that lacks one."""
    transport = RequestsHTTPTransport(url=THEGRAPH_ENDPOINT)
    client = Client(transport=transport, fetch_schema_from_transport=True)

    for mech_request in tqdm(
        mech_requests.values(),
        desc=f"{'Fetching responses':>{TEXT_ALIGNMENT}}",
        miniters=1,
    ):
        if "deliver" in mech_request:
            continue

        variables = {
            "requestId": mech_request["requestId"],
            "blockNumber_gte": mech_request["blockNumber"],
            "blockNumber_lte": str(
                int(mech_request["blockNumber"]) + MECH_FROM_BLOCK_RANGE
            ),
        }
        response = client.execute(gql(DELIVERS_QUERY), variable_values=variables)
        items = response.get("delivers")

        # If the user sends requests with the same values (tool, prompt, nonce) it
        # will generate the same requestId. Therefore, multiple items can be retrieved
        # at this point. We assume the most likely deliver to this request is the
        # one with the closest blockNumber among all delivers with the same requestId
        # (items are ordered by blockNumber, so that is the first one).
        if items:
            mech_request["deliver"] = items[0]

        _write_mech_events_to_file(mech_requests)

    _write_mech_events_to_file(mech_requests, True)


def _populate_missing_ipfs_contents(mech_requests: Dict[str, Any]) -> None:
    """Download the IPFS metadata for each request and for its deliver."""
    for mech_request in tqdm(
        mech_requests.values(),
        desc=f"{'Fetching IPFS contents':>{TEXT_ALIGNMENT}}",
        miniters=1,
    ):
        if "ipfsContents" not in mech_request:
            ipfs_hash = mech_request["ipfsHash"]
            url = f"{IPFS_ADDRESS}{ipfs_hash}/metadata.json"
            response = requests.get(url, timeout=HTTP_TIMEOUT_SECONDS)
            response.raise_for_status()
            mech_request["ipfsContents"] = response.json()

        if "deliver" not in mech_request:
            continue

        deliver = mech_request["deliver"]
        if "ipfsContents" not in deliver:
            ipfs_hash = deliver["ipfsHash"]
            request_id = deliver["requestId"]
            url = f"{IPFS_ADDRESS}{ipfs_hash}/{request_id}"
            response = requests.get(url, timeout=HTTP_TIMEOUT_SECONDS)
            response.raise_for_status()
            deliver["ipfsContents"] = response.json()

        _write_mech_events_to_file(mech_requests)

    _write_mech_events_to_file(mech_requests, True)


def _find_duplicate_delivers(
    mech_requests: Dict[str, Any]
) -> Dict[str, List[Dict[str, Any]]]:
    """Group requests whose delivers share the same deliver id.

    Returns only the groups with more than one request (true duplicates).
    """
    grouped: Dict[str, List[Dict[str, Any]]] = defaultdict(list)

    for r in tqdm(
        mech_requests.values(),
        desc=f"{'Finding duplicate delivers':>{TEXT_ALIGNMENT}}",
        miniters=1,
    ):
        if "deliver" in r:
            grouped[r["deliver"]["id"]].append(r)

    requests_with_duplicate_deliver_ids = {
        k: v for k, v in grouped.items() if len(v) > 1
    }

    print(
        f"Duplicate deliver ids found: {len(requests_with_duplicate_deliver_ids.keys())}"
    )
    return requests_with_duplicate_deliver_ids


def _process_duplicate_delivers(mech_requests: Dict[str, Any]) -> None:
    """For each duplicated deliver id, keep the deliver only on the closest request.

    A deliver can only answer one request; among the candidates, the request
    with the smallest request-to-deliver block distance keeps it and the
    others have their ``deliver`` entry removed.
    """
    requests_with_duplicate_deliver_ids = _find_duplicate_delivers(mech_requests)
    for mech_requests_list in tqdm(
        requests_with_duplicate_deliver_ids.values(),
        desc=f"{'Processing duplicate delivers':>{TEXT_ALIGNMENT}}",
        miniters=1,
    ):
        min_difference_request = min(
            mech_requests_list,
            key=lambda x: int(x["deliver"]["blockNumber"]) - int(x["blockNumber"]),
        )
        for mech_request in mech_requests_list:
            if mech_request is not min_difference_request:
                mech_request.pop("deliver", None)

    _write_mech_events_to_file(mech_requests, True)


# Timestamp of the last checkpoint write, used to throttle disk writes.
last_write_time = 0.0


def _write_mech_events_to_file(
    mech_requests: Dict[str, Any], force_write: bool = False
) -> None:
    """Persist *mech_requests* to disk, at most once every
    ``MINIMUM_WRITE_FILE_DELAY_SECONDS`` unless *force_write* is set."""
    global last_write_time  # pylint: disable=global-statement
    now = time.time()

    if force_write or (now - last_write_time) >= MINIMUM_WRITE_FILE_DELAY_SECONDS:
        with open(MECH_REQUESTS_JSON_PATH, "w", encoding="utf-8") as file:
            json.dump({"mechRequests": mech_requests}, file, indent=2, sort_keys=True)
        last_write_time = now


def get_mech_requests(sender: str) -> Dict[str, Any]:
    """Get Mech requests populated with the associated response and IPFS contents."""
    mech_requests: Dict[str, Any] = {}
    try:
        with open(MECH_REQUESTS_JSON_PATH, "r", encoding="UTF-8") as json_file:
            existing_data = json.load(json_file)
            mech_requests = existing_data.get("mechRequests", {})
    except FileNotFoundError:
        pass  # File doesn't exist yet, so there are no existing requests

    _populate_missing_requests(sender.lower(), mech_requests)
    _populate_missing_responses(mech_requests)
    _process_duplicate_delivers(mech_requests)
    _find_duplicate_delivers(mech_requests)  # sanity check: should now report 0
    _populate_missing_ipfs_contents(mech_requests)
    return mech_requests